Dec 05 10:47:52 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 10:47:52 crc restorecon[4689]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 10:47:52 crc restorecon[4689]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc 
restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 10:47:52 crc 
restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc 
restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:52 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:53 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:53 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:53 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:53 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:53 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:53 crc restorecon[4689]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 10:47:53 crc restorecon[4689]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 05 10:47:53 crc kubenswrapper[5014]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 10:47:53 crc kubenswrapper[5014]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 05 10:47:53 crc kubenswrapper[5014]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 10:47:53 crc kubenswrapper[5014]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 05 10:47:53 crc kubenswrapper[5014]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 05 10:47:53 crc kubenswrapper[5014]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.160771 5014 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165605 5014 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165633 5014 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165642 5014 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165649 5014 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165655 5014 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165660 5014 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165665 5014 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165673 5014 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165681 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165686 5014 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165691 5014 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165696 5014 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165700 5014 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165705 5014 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165710 5014 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165714 5014 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165718 5014 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165722 5014 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165727 5014 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165738 5014 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165743 5014 
feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165747 5014 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165752 5014 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165756 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165759 5014 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165763 5014 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165767 5014 feature_gate.go:330] unrecognized feature gate: Example Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165771 5014 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165776 5014 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165780 5014 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165785 5014 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165789 5014 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165793 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165797 5014 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165802 5014 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165806 5014 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165813 5014 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165820 5014 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165827 5014 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165833 5014 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165839 5014 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165844 5014 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165849 5014 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165853 5014 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165857 5014 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165862 5014 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165866 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165869 5014 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165874 5014 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165878 5014 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165882 5014 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165886 5014 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165889 5014 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165893 5014 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165896 5014 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165901 5014 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165904 5014 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165909 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165912 5014 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165915 5014 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165919 5014 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165924 5014 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
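[annotation] The runs of "unrecognized feature gate" warnings above and below come from feature_gate.go:330: the gate list handed to this kubelet includes names defined at the OpenShift level, which the upstream Kubernetes kubelet does not know, so each is warned about and ignored; known gates that are deprecated (KMSv1, line :351) or already GA (CloudDualStackNodeIPs, ValidatingAdmissionPolicy, line :353) draw removal warnings instead. A self-contained sketch of that three-way classification; the knownGates table is invented for illustration, the kubelet's real table lives in feature_gate.go.

// gatecheck.go: sketch of how a feature-gate parser classifies the names
// seen in this log. knownGates is illustrative, not the kubelet's table.
package main

import "fmt"

type gateState int

const (
	alpha gateState = iota
	deprecated
	ga
)

func main() {
	knownGates := map[string]gateState{
		"KMSv1":                     deprecated,
		"CloudDualStackNodeIPs":     ga,
		"ValidatingAdmissionPolicy": ga,
		"NodeSwap":                  alpha,
	}
	requested := []string{"ManagedBootImages", "KMSv1", "CloudDualStackNodeIPs", "NodeSwap"}
	for _, name := range requested {
		state, ok := knownGates[name]
		switch {
		case !ok:
			// feature_gate.go:330 in the log above
			fmt.Printf("W: unrecognized feature gate: %s\n", name)
		case state == deprecated:
			// feature_gate.go:351
			fmt.Printf("W: Setting deprecated feature gate %s=true. It will be removed in a future release.\n", name)
		case state == ga:
			// feature_gate.go:353
			fmt.Printf("W: Setting GA feature gate %s=true. It will be removed in a future release.\n", name)
		default:
			fmt.Printf("I: feature gate %s recognized\n", name)
		}
	}
}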
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165929 5014 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165933 5014 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165937 5014 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165941 5014 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165945 5014 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165950 5014 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165954 5014 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165958 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.165961 5014 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166047 5014 flags.go:64] FLAG: --address="0.0.0.0" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166068 5014 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166081 5014 flags.go:64] FLAG: --anonymous-auth="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166087 5014 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166093 5014 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166097 5014 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166103 5014 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166109 5014 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166114 5014 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166119 5014 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166123 5014 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166169 5014 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166175 5014 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166179 5014 flags.go:64] FLAG: --cgroup-root="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166183 5014 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166187 5014 flags.go:64] FLAG: --client-ca-file="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166192 5014 flags.go:64] FLAG: --cloud-config="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166196 5014 flags.go:64] FLAG: --cloud-provider="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166200 5014 flags.go:64] FLAG: --cluster-dns="[]" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 
10:47:53.166216 5014 flags.go:64] FLAG: --cluster-domain="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166220 5014 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166224 5014 flags.go:64] FLAG: --config-dir="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166228 5014 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166233 5014 flags.go:64] FLAG: --container-log-max-files="5" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166240 5014 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166244 5014 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166261 5014 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166266 5014 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166284 5014 flags.go:64] FLAG: --contention-profiling="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166288 5014 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166293 5014 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166297 5014 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166302 5014 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166308 5014 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166312 5014 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166316 5014 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166320 5014 flags.go:64] FLAG: --enable-load-reader="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166324 5014 flags.go:64] FLAG: --enable-server="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166329 5014 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166338 5014 flags.go:64] FLAG: --event-burst="100" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166342 5014 flags.go:64] FLAG: --event-qps="50" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166347 5014 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166351 5014 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166355 5014 flags.go:64] FLAG: --eviction-hard="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166361 5014 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166365 5014 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166369 5014 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166379 5014 flags.go:64] FLAG: --eviction-soft="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166384 5014 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 05 10:47:53 crc 
kubenswrapper[5014]: I1205 10:47:53.166388 5014 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166393 5014 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166397 5014 flags.go:64] FLAG: --experimental-mounter-path="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166402 5014 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166406 5014 flags.go:64] FLAG: --fail-swap-on="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166410 5014 flags.go:64] FLAG: --feature-gates="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166416 5014 flags.go:64] FLAG: --file-check-frequency="20s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166420 5014 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166424 5014 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166429 5014 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166433 5014 flags.go:64] FLAG: --healthz-port="10248" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166438 5014 flags.go:64] FLAG: --help="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166442 5014 flags.go:64] FLAG: --hostname-override="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166446 5014 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166451 5014 flags.go:64] FLAG: --http-check-frequency="20s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166455 5014 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166459 5014 flags.go:64] FLAG: --image-credential-provider-config="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166464 5014 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166468 5014 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166472 5014 flags.go:64] FLAG: --image-service-endpoint="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166476 5014 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166480 5014 flags.go:64] FLAG: --kube-api-burst="100" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166485 5014 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166489 5014 flags.go:64] FLAG: --kube-api-qps="50" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166494 5014 flags.go:64] FLAG: --kube-reserved="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166499 5014 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166504 5014 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166508 5014 flags.go:64] FLAG: --kubelet-cgroups="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166513 5014 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166518 5014 flags.go:64] FLAG: --lock-file="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 
10:47:53.166522 5014 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166527 5014 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166533 5014 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166543 5014 flags.go:64] FLAG: --log-json-split-stream="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166554 5014 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166558 5014 flags.go:64] FLAG: --log-text-split-stream="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166562 5014 flags.go:64] FLAG: --logging-format="text" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166567 5014 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166571 5014 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166576 5014 flags.go:64] FLAG: --manifest-url="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166580 5014 flags.go:64] FLAG: --manifest-url-header="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166586 5014 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166590 5014 flags.go:64] FLAG: --max-open-files="1000000" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166597 5014 flags.go:64] FLAG: --max-pods="110" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166601 5014 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166606 5014 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166611 5014 flags.go:64] FLAG: --memory-manager-policy="None" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166616 5014 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166621 5014 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166630 5014 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166639 5014 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166659 5014 flags.go:64] FLAG: --node-status-max-images="50" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166666 5014 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166670 5014 flags.go:64] FLAG: --oom-score-adj="-999" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166675 5014 flags.go:64] FLAG: --pod-cidr="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166679 5014 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166685 5014 flags.go:64] FLAG: --pod-manifest-path="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166689 5014 flags.go:64] FLAG: --pod-max-pids="-1" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166694 5014 flags.go:64] FLAG: --pods-per-core="0" Dec 05 
10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166698 5014 flags.go:64] FLAG: --port="10250" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166702 5014 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166706 5014 flags.go:64] FLAG: --provider-id="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166711 5014 flags.go:64] FLAG: --qos-reserved="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166715 5014 flags.go:64] FLAG: --read-only-port="10255" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166720 5014 flags.go:64] FLAG: --register-node="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166724 5014 flags.go:64] FLAG: --register-schedulable="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166728 5014 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166737 5014 flags.go:64] FLAG: --registry-burst="10" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166742 5014 flags.go:64] FLAG: --registry-qps="5" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166746 5014 flags.go:64] FLAG: --reserved-cpus="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166757 5014 flags.go:64] FLAG: --reserved-memory="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166763 5014 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166767 5014 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166772 5014 flags.go:64] FLAG: --rotate-certificates="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166777 5014 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166781 5014 flags.go:64] FLAG: --runonce="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166786 5014 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166790 5014 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166794 5014 flags.go:64] FLAG: --seccomp-default="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166799 5014 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166803 5014 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166808 5014 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166813 5014 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166818 5014 flags.go:64] FLAG: --storage-driver-password="root" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166822 5014 flags.go:64] FLAG: --storage-driver-secure="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166826 5014 flags.go:64] FLAG: --storage-driver-table="stats" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166831 5014 flags.go:64] FLAG: --storage-driver-user="root" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166837 5014 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166848 5014 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 
10:47:53.166857 5014 flags.go:64] FLAG: --system-cgroups="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166863 5014 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166875 5014 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166880 5014 flags.go:64] FLAG: --tls-cert-file="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166886 5014 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166900 5014 flags.go:64] FLAG: --tls-min-version="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166906 5014 flags.go:64] FLAG: --tls-private-key-file="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166911 5014 flags.go:64] FLAG: --topology-manager-policy="none" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166916 5014 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166921 5014 flags.go:64] FLAG: --topology-manager-scope="container" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166926 5014 flags.go:64] FLAG: --v="2" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166934 5014 flags.go:64] FLAG: --version="false" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166942 5014 flags.go:64] FLAG: --vmodule="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166948 5014 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166954 5014 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167086 5014 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167094 5014 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167107 5014 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167112 5014 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167118 5014 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167123 5014 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167128 5014 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167133 5014 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167138 5014 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167143 5014 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167148 5014 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167155 5014 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167161 5014 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167167 5014 
feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167171 5014 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167175 5014 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167180 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167185 5014 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167189 5014 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167194 5014 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167200 5014 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167205 5014 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167210 5014 feature_gate.go:330] unrecognized feature gate: Example Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167214 5014 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167218 5014 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167223 5014 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167227 5014 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167230 5014 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167234 5014 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167238 5014 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167242 5014 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167245 5014 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167249 5014 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167252 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167256 5014 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167259 5014 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167264 5014 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
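[annotation] Interleaved with the gate warnings, the flags.go:64 dump above records every effective command-line value as FLAG: --name="value", one entry per flag, which makes two kubelet startups easy to diff. A small illustrative extractor; the regex is an assumption fitted only to the format shown in this log.

// flagdump.go: extract `FLAG: --name="value"` pairs from kubelet journal
// text as printed by flags.go:64 above. Regex and sample line are
// illustrative; feed it the raw log to compare startups.
package main

import (
	"fmt"
	"regexp"
)

func main() {
	line := `Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.166224 5014 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"`
	re := regexp.MustCompile(`FLAG: (--[\w-]+)="?(.*?)"?$`)
	if m := re.FindStringSubmatch(line); m != nil {
		fmt.Printf("%s = %q\n", m[1], m[2]) // --config = "/etc/kubernetes/kubelet.conf"
	}
}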
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167303 5014 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167315 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167319 5014 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167323 5014 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167327 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167332 5014 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167337 5014 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167341 5014 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167353 5014 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167358 5014 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167361 5014 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167365 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167369 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167372 5014 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167376 5014 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167380 5014 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167384 5014 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167388 5014 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167391 5014 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167394 5014 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167398 5014 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167402 5014 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167405 5014 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167409 5014 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167412 5014 
feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167416 5014 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167419 5014 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167423 5014 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167426 5014 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167430 5014 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167433 5014 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167437 5014 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167440 5014 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.167444 5014 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.167455 5014 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.174754 5014 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.174796 5014 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174869 5014 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174877 5014 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174881 5014 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174888 5014 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
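[annotation] After each parse pass, feature_gate.go:386 condenses the result into a single effective-gate map, the "feature gates: {map[...]}" line above; it appears three times in this startup because the gate set is parsed three times. A sketch that turns that summary back into a map[string]bool, with the parsing fitted only to the format visible in this log.

// gatesummary.go: parse the `feature gates: {map[...]}` summary printed at
// feature_gate.go:386 into a map[string]bool. Format assumptions are based
// only on the lines visible in this log.
package main

import (
	"fmt"
	"strings"
)

func parseGates(summary string) map[string]bool {
	gates := map[string]bool{}
	start := strings.Index(summary, "map[")
	end := strings.LastIndex(summary, "]")
	if start < 0 || end <= start {
		return gates
	}
	for _, pair := range strings.Fields(summary[start+len("map["):end]) {
		if name, val, ok := strings.Cut(pair, ":"); ok {
			gates[name] = val == "true"
		}
	}
	return gates
}

func main() {
	line := "feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}"
	fmt.Println(parseGates(line)["CloudDualStackNodeIPs"]) // true
}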
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174896 5014 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174901 5014 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174905 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174910 5014 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174914 5014 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174918 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174923 5014 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174927 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174931 5014 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174935 5014 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174939 5014 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174943 5014 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174949 5014 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174953 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174957 5014 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174962 5014 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174966 5014 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174971 5014 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174975 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174979 5014 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174983 5014 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174987 5014 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174991 5014 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174995 5014 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.174999 5014 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 10:47:53 crc 
kubenswrapper[5014]: W1205 10:47:53.175005 5014 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175010 5014 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175017 5014 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175021 5014 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175026 5014 feature_gate.go:330] unrecognized feature gate: Example Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175032 5014 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175125 5014 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175131 5014 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175136 5014 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175141 5014 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175145 5014 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175149 5014 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175153 5014 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175157 5014 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175167 5014 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175171 5014 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175175 5014 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175184 5014 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175188 5014 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175194 5014 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175199 5014 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175204 5014 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175209 5014 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175213 5014 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175217 5014 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175222 5014 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175226 5014 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175230 5014 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175233 5014 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175238 5014 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175243 5014 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175248 5014 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175252 5014 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175257 5014 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175261 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175285 5014 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175290 5014 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175626 5014 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175635 5014 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175639 5014 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175645 5014 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175652 5014 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.175661 5014 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false 
ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175783 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175790 5014 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175796 5014 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175802 5014 feature_gate.go:330] unrecognized feature gate: Example Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175807 5014 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175812 5014 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175817 5014 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175821 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175828 5014 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175832 5014 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175836 5014 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175841 5014 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175845 5014 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175849 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175853 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175860 5014 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175864 5014 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175868 5014 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175872 5014 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175876 5014 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175880 5014 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175884 5014 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175890 5014 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175896 5014 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175901 5014 feature_gate.go:330] unrecognized feature gate: 
ConsolePluginContentSecurityPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175905 5014 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175910 5014 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175914 5014 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175918 5014 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175922 5014 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175926 5014 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175930 5014 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175934 5014 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175938 5014 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175942 5014 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175947 5014 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175951 5014 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175955 5014 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175959 5014 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175963 5014 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175967 5014 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175971 5014 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175975 5014 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175979 5014 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175983 5014 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175987 5014 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175991 5014 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175995 5014 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.175998 5014 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176002 5014 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176006 5014 feature_gate.go:330] 
unrecognized feature gate: InsightsConfigAPI Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176010 5014 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176014 5014 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176018 5014 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176022 5014 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176027 5014 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176031 5014 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176037 5014 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176043 5014 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176047 5014 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176052 5014 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176057 5014 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176061 5014 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176066 5014 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176071 5014 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176075 5014 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176080 5014 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176085 5014 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176091 5014 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176095 5014 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.176099 5014 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.176107 5014 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.176350 5014 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.180065 5014 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.180175 5014 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.180751 5014 server.go:997] "Starting client certificate rotation"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.180773 5014 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.180943 5014 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-20 22:35:32.885012425 +0000 UTC
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.181072 5014 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.186231 5014 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.187743 5014 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.187961 5014 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.110:6443: connect: connection refused" logger="UnhandledError"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.195403 5014 log.go:25] "Validated CRI v1 runtime API"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.209564 5014 log.go:25] "Validated CRI v1 image API"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.211478 5014 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.214809 5014
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.214809 5014 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-10-43-19-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.214883 5014 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.238721 5014 manager.go:217] Machine: {Timestamp:2025-12-05 10:47:53.236913132 +0000 UTC m=+0.185030886 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:85087ce8-dc93-48b4-8df2-1d14cd5a8c8f BootID:553a7abf-1287-4c60-9edc-6cc1ccaed34a Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:4c:ed:9d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:4c:ed:9d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:37:c5:7f Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:3b:6a:8d Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:d2:50:74 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:f2:83:73 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:2e:b1:54:ae:b2:d1 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:0a:aa:17:fe:6c:3c Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.239029 5014 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.239219 5014 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.240176 5014 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.240437 5014 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.240509 5014 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.240840 5014 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.240853 5014 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.241055 5014 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.241087 5014 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.241499 5014 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.241607 5014 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.242693 5014 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.242715 5014 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.242741 5014 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.242754 5014 kubelet.go:324] "Adding apiserver pod source"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.242767 5014 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.244662 5014 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.244698 5014 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused
Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.244750 5014 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.110:6443: connect: connection refused" logger="UnhandledError"
Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.244750 5014 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.110:6443: connect: connection refused" logger="UnhandledError"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.244993 5014 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.245358 5014 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246082 5014 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246682 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246717 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246731 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246743 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246764 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246771 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246779 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246791 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246803 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246817 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246840 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.246862 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.247176 5014 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.247659 5014 server.go:1280] "Started kubelet"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.248389 5014 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.249729 5014 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.249064 5014 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 10:47:53 crc systemd[1]: Started Kubernetes Kubelet.
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.250858 5014 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.255810 5014 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.255886 5014 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.256492 5014 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.256539 5014 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.256773 5014 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.249939 5014 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.110:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e4c017cc2b411 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 10:47:53.247626257 +0000 UTC m=+0.195743971,LastTimestamp:2025-12-05 10:47:53.247626257 +0000 UTC m=+0.195743971,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.257111 5014 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 00:10:35.840337181 +0000 UTC
Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.257916 5014 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.260144 5014 factory.go:55] Registering systemd factory
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.260179 5014 factory.go:221] Registration of the systemd container factory successfully
Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.260881 5014 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused
Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.261066 5014 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.110:6443: connect: connection refused" logger="UnhandledError"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.261699 5014 factory.go:153] Registering CRI-O factory
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.261774 5014 factory.go:221] Registration of the crio container factory successfully
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.261873 5014 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.261962 5014 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.262009 5014 factory.go:103] Registering Raw factory
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.262044 5014 manager.go:1196] Started watching for new ooms in manager
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.263322 5014 manager.go:319] Starting recovery of all containers
Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.265327 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="200ms"
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272310 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272394 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272406 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272417 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272433 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272468 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272477 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272489 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272500 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272510 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272520 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272528 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272538 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272556 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272564 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272605 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272650 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272661 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272673 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272684 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272696 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272705 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272715 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272728 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272740 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272751 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272820 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272943 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272956 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272966 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.272986 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273002 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273012 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273021 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273047 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273057 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273067 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273078 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273089 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273098 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273108 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273126 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273138 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273150 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273162 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273173 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273182 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273192 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273201 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273213 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273223 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273237 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273247 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273257 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273285 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273299 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273309 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273318 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273327 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273338 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273348 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273357 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273367 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273377 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273385 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273394 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273403 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273414 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273422 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273432 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273446 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273455 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273468 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273478 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273488 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273498 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273508 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273518 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273527 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273538 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273549 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273560 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273571 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273580 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273591 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273602 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273612 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273625 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273638 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273649 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273660 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273672 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273688 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273700 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273713 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273723 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273733 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273742 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273751 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273761 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273771 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273782 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273834 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273850 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273861 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273871 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273881 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273892 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273903 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273912 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273922 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273932 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273942 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273954 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273962 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273974 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273987 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.273998 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274009 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274019 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274028 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274039 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274048 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274057 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274066 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274078 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274087 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274096 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274106 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274116 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274126 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274136 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274145 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274156 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274166 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274175 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274185 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274196 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274204 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274214 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274223 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274233 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274283 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274308 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274319 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274331 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274339 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274348 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274358 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274394 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274405 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274416 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274427 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274436 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274446 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274456 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274466 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274475 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274486 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274495 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274505 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274515 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274524 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274532 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274542 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274553 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274564 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274574 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274585 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274595 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274604 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274613 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274623 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" 
volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274633 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274646 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274657 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274666 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274676 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274689 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274700 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274710 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274719 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274732 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274743 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274757 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274770 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274790 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274803 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274821 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274833 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274865 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274883 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274895 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274907 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274926 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.274942 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.276767 5014 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.276833 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.276856 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.276931 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.276948 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.276998 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.277015 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.277027 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.277039 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 
10:47:53.277050 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.277096 5014 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.277108 5014 reconstruct.go:97] "Volume reconstruction finished" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.277117 5014 reconciler.go:26] "Reconciler: start to sync state" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.289232 5014 manager.go:324] Recovery completed Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.301168 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.303701 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.303757 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.303771 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.305868 5014 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.305903 5014 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.305929 5014 state_mem.go:36] "Initialized new in-memory state store" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.314171 5014 policy_none.go:49] "None policy: Start" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.314808 5014 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.316364 5014 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.316406 5014 state_mem.go:35] "Initializing new in-memory state store" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.316510 5014 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.316573 5014 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.316905 5014 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.316976 5014 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.317772 5014 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.317875 5014 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.110:6443: connect: connection refused" logger="UnhandledError" Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.358149 5014 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.373545 5014 manager.go:334] "Starting Device Plugin manager" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.373861 5014 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.373950 5014 server.go:79] "Starting device plugin registration server" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.374642 5014 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.374750 5014 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.374951 5014 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.375100 5014 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.375118 5014 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.386437 5014 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.417377 5014 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.417555 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.419233 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.419337 5014 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.419351 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.419524 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.419942 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.420060 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.421060 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.421105 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.421124 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.421401 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.421499 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.421559 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.421828 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.421866 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.421882 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.422524 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.422582 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.422594 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.422656 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.422679 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.422690 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.422912 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.423081 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.423144 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.423885 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.423926 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.423944 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.424046 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.424078 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.424089 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.424140 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.424256 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.424323 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.425196 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.425229 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.425241 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.425198 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.425305 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.425326 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.425447 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.425483 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.426419 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.426452 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.426465 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.466168 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="400ms" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.475238 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.476805 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.476852 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.476867 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.476895 5014 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.477441 5014 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.110:6443: connect: connection refused" node="crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478613 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478671 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478696 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478721 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478739 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478761 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478780 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478834 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478888 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478916 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.478956 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.479002 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.479052 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.479125 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.479156 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.580707 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.580813 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.580851 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.580912 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.580942 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.580977 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581006 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581046 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581076 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581070 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581045 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581453 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581385 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581162 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581524 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581449 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581423 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581098 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581510 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581580 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581672 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581757 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581877 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581942 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.581991 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.582016 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.582081 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.582090 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.582105 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.582232 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.678316 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.680050 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.680114 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.680126 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.680157 5014 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.680824 5014 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.110:6443: connect: connection refused" node="crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.753612 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.760808 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.779260 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-22b5c41bf427fbda996343bc0e31681e782641882bbf72b817fc99b0cbaf4e35 WatchSource:0}: Error finding container 22b5c41bf427fbda996343bc0e31681e782641882bbf72b817fc99b0cbaf4e35: Status 404 returned error can't find the container with id 22b5c41bf427fbda996343bc0e31681e782641882bbf72b817fc99b0cbaf4e35 Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.781564 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-db180a21803938cd98fbde279d79772167c0d5dc080d2f832fb43c760a38ebbd WatchSource:0}: Error finding container db180a21803938cd98fbde279d79772167c0d5dc080d2f832fb43c760a38ebbd: Status 404 returned error can't find the container with id db180a21803938cd98fbde279d79772167c0d5dc080d2f832fb43c760a38ebbd Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.784903 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.801332 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-2abd80904b1b30ec7485458395fba72df6515395627a27eb71d0460fee5e281f WatchSource:0}: Error finding container 2abd80904b1b30ec7485458395fba72df6515395627a27eb71d0460fee5e281f: Status 404 returned error can't find the container with id 2abd80904b1b30ec7485458395fba72df6515395627a27eb71d0460fee5e281f Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.806899 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: I1205 10:47:53.812601 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:53 crc kubenswrapper[5014]: W1205 10:47:53.829709 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-90cf767ba150d4e373e9b4db7e24422fced2a37fda3fc38215fb65ec2c45e454 WatchSource:0}: Error finding container 90cf767ba150d4e373e9b4db7e24422fced2a37fda3fc38215fb65ec2c45e454: Status 404 returned error can't find the container with id 90cf767ba150d4e373e9b4db7e24422fced2a37fda3fc38215fb65ec2c45e454 Dec 05 10:47:53 crc kubenswrapper[5014]: E1205 10:47:53.867043 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="800ms" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.081381 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.083647 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.083680 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.083690 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.083714 5014 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 10:47:54 crc kubenswrapper[5014]: E1205 10:47:54.084199 5014 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.110:6443: connect: connection refused" node="crc" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.251718 5014 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.257798 5014 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 11:13:27.093434676 +0000 UTC Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.257912 5014 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 864h25m32.835525413s for next certificate rotation Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.323236 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.323392 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"90cf767ba150d4e373e9b4db7e24422fced2a37fda3fc38215fb65ec2c45e454"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.325818 5014 generic.go:334] "Generic (PLEG): 
container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139" exitCode=0 Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.325961 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.326053 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e77ca950a6c5dfb7c404b9dec12cd187f9466c0c42f0b63870f6314eeea98a55"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.326210 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.328810 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.328862 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.328874 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.329954 5014 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc" exitCode=0 Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.329995 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.330031 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2abd80904b1b30ec7485458395fba72df6515395627a27eb71d0460fee5e281f"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.330153 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.330852 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.331287 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.331319 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.331333 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.331705 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.331734 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 
10:47:54.331744 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.332082 5014 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="922b1f5c5f2b298c0293d897f83012d4158eb87a00fbd02025026addb0c1a17f" exitCode=0 Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.332166 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"922b1f5c5f2b298c0293d897f83012d4158eb87a00fbd02025026addb0c1a17f"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.332212 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"22b5c41bf427fbda996343bc0e31681e782641882bbf72b817fc99b0cbaf4e35"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.332326 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.333397 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.333416 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.333427 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.334843 5014 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f" exitCode=0 Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.334882 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.334949 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"db180a21803938cd98fbde279d79772167c0d5dc080d2f832fb43c760a38ebbd"} Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.335079 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.336113 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.336148 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.336158 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:54 crc kubenswrapper[5014]: W1205 10:47:54.559616 5014 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get 
"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused Dec 05 10:47:54 crc kubenswrapper[5014]: E1205 10:47:54.559729 5014 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.110:6443: connect: connection refused" logger="UnhandledError" Dec 05 10:47:54 crc kubenswrapper[5014]: W1205 10:47:54.658796 5014 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused Dec 05 10:47:54 crc kubenswrapper[5014]: E1205 10:47:54.658902 5014 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.110:6443: connect: connection refused" logger="UnhandledError" Dec 05 10:47:54 crc kubenswrapper[5014]: E1205 10:47:54.667954 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="1.6s" Dec 05 10:47:54 crc kubenswrapper[5014]: W1205 10:47:54.807588 5014 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused Dec 05 10:47:54 crc kubenswrapper[5014]: E1205 10:47:54.807706 5014 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.110:6443: connect: connection refused" logger="UnhandledError" Dec 05 10:47:54 crc kubenswrapper[5014]: W1205 10:47:54.811476 5014 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused Dec 05 10:47:54 crc kubenswrapper[5014]: E1205 10:47:54.811640 5014 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.110:6443: connect: connection refused" logger="UnhandledError" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.885247 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.888880 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.888944 5014 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.888960 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:54 crc kubenswrapper[5014]: I1205 10:47:54.889004 5014 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 10:47:54 crc kubenswrapper[5014]: E1205 10:47:54.889884 5014 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.110:6443: connect: connection refused" node="crc" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.250911 5014 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.110:6443: connect: connection refused Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.343121 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.343170 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.343178 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.343197 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.345763 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.345801 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.345813 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.348203 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.348239 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.348253 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.348284 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.351740 5014 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67" exitCode=0 Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.351829 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.352028 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.353165 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.353197 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.353208 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.354979 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"db6335c267e8ca5202341dbd1e84244d1923f1ed68e7e6cbb3e30790a0f00a8d"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.355076 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.356210 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.356232 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.356243 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.366937 5014 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.369998 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.370051 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213"} Dec 05 10:47:55 crc 
kubenswrapper[5014]: I1205 10:47:55.370070 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f"} Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.370113 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.370917 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.370950 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.370959 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:55 crc kubenswrapper[5014]: I1205 10:47:55.665567 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.375471 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc"} Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.375614 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.376416 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.376443 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.376452 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.383211 5014 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17" exitCode=0 Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.383312 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17"} Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.383349 5014 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.383385 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.383510 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.383815 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.384473 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:56 
crc kubenswrapper[5014]: I1205 10:47:56.384579 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.384604 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.384618 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.384591 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.384824 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.384741 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.384947 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.384963 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.490226 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.491631 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.491665 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.491674 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:56 crc kubenswrapper[5014]: I1205 10:47:56.491697 5014 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.389818 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.389954 5014 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.390044 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.390619 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50"} Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.390681 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a"} Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.390695 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4"} Dec 05 10:47:57 
crc kubenswrapper[5014]: I1205 10:47:57.390704 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b"} Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.390951 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.391004 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.391015 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.391285 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.391312 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:57 crc kubenswrapper[5014]: I1205 10:47:57.391321 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.286466 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.286646 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.287665 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.287700 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.287713 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.399455 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657"} Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.399559 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.400574 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.400609 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.400620 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.930195 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.930493 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:58 crc kubenswrapper[5014]: 
I1205 10:47:58.931717 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.931756 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:58 crc kubenswrapper[5014]: I1205 10:47:58.931771 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.199013 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.199219 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.200840 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.201182 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.201194 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.740773 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.740839 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.743724 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.748037 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.748069 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.748078 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.748816 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.748880 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.752701 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.752761 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.752776 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.752825 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.752849 5014 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.752865 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.753730 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:47:59 crc kubenswrapper[5014]: I1205 10:47:59.951001 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.553038 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.746335 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.746340 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.746618 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.747669 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.748064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.747779 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.748098 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.748222 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.748239 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.748157 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.748191 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:00 crc kubenswrapper[5014]: I1205 10:48:00.748446 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:02 crc kubenswrapper[5014]: I1205 10:48:02.199364 5014 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 10:48:02 crc kubenswrapper[5014]: I1205 10:48:02.200046 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" 
probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 10:48:03 crc kubenswrapper[5014]: E1205 10:48:03.386647 5014 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.212970 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.213176 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.214632 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.215017 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.215090 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.218578 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.760774 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.762366 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.762407 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:04 crc kubenswrapper[5014]: I1205 10:48:04.762418 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:05 crc kubenswrapper[5014]: I1205 10:48:05.300987 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 05 10:48:05 crc kubenswrapper[5014]: I1205 10:48:05.301184 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:48:05 crc kubenswrapper[5014]: I1205 10:48:05.302397 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:05 crc kubenswrapper[5014]: I1205 10:48:05.302441 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:05 crc kubenswrapper[5014]: I1205 10:48:05.302452 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:05 crc kubenswrapper[5014]: E1205 10:48:05.368513 5014 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 10:48:05 crc kubenswrapper[5014]: I1205 10:48:05.723958 5014 patch_prober.go:28] interesting pod/kube-apiserver-crc 
container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 10:48:05 crc kubenswrapper[5014]: I1205 10:48:05.724034 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 10:48:05 crc kubenswrapper[5014]: I1205 10:48:05.729905 5014 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 10:48:05 crc kubenswrapper[5014]: I1205 10:48:05.729991 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 10:48:08 crc kubenswrapper[5014]: I1205 10:48:08.931676 5014 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 10:48:08 crc kubenswrapper[5014]: I1205 10:48:08.931818 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.478686 5014 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.495829 5014 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.515606 5014 csr.go:261] certificate signing request csr-phwcb is approved, waiting to be issued Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.521979 5014 csr.go:257] certificate signing request csr-phwcb is issued Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.737801 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.738195 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.738480 5014 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 10:48:09 crc 
kubenswrapper[5014]: I1205 10:48:09.738548 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.739355 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.739386 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.739398 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.744312 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.773614 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.774156 5014 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.774221 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.774598 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.774629 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:09 crc kubenswrapper[5014]: I1205 10:48:09.774640 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.523423 5014 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-05 10:43:09 +0000 UTC, rotation deadline is 2026-10-13 00:37:54.679463796 +0000 UTC Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.523478 5014 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7477h49m44.155988933s for next certificate rotation Dec 05 10:48:10 crc kubenswrapper[5014]: E1205 10:48:10.715172 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.719858 5014 trace.go:236] Trace[2042506639]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 10:47:57.202) (total time: 13517ms): Dec 05 10:48:10 crc kubenswrapper[5014]: 
Trace[2042506639]: ---"Objects listed" error: 13517ms (10:48:10.719) Dec 05 10:48:10 crc kubenswrapper[5014]: Trace[2042506639]: [13.517467729s] [13.517467729s] END Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.719884 5014 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.720150 5014 trace.go:236] Trace[1935699165]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 10:47:57.244) (total time: 13475ms): Dec 05 10:48:10 crc kubenswrapper[5014]: Trace[1935699165]: ---"Objects listed" error: 13475ms (10:48:10.720) Dec 05 10:48:10 crc kubenswrapper[5014]: Trace[1935699165]: [13.47516623s] [13.47516623s] END Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.720179 5014 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 10:48:10 crc kubenswrapper[5014]: E1205 10:48:10.724601 5014 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.724896 5014 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.725335 5014 trace.go:236] Trace[2119484334]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 10:47:57.577) (total time: 13147ms): Dec 05 10:48:10 crc kubenswrapper[5014]: Trace[2119484334]: ---"Objects listed" error: 13147ms (10:48:10.725) Dec 05 10:48:10 crc kubenswrapper[5014]: Trace[2119484334]: [13.147779209s] [13.147779209s] END Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.725360 5014 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.725642 5014 trace.go:236] Trace[917086340]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 10:47:57.075) (total time: 13650ms): Dec 05 10:48:10 crc kubenswrapper[5014]: Trace[917086340]: ---"Objects listed" error: 13650ms (10:48:10.725) Dec 05 10:48:10 crc kubenswrapper[5014]: Trace[917086340]: [13.650554034s] [13.650554034s] END Dec 05 10:48:10 crc kubenswrapper[5014]: I1205 10:48:10.725660 5014 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.254578 5014 apiserver.go:52] "Watching apiserver" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.256635 5014 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.256924 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.257289 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.257353 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.257369 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.257415 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.257437 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.260459 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.260494 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.260557 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.260952 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.260975 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.261000 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.261636 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.263796 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.263880 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.264165 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.265172 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.265448 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.265668 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.295426 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.313170 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.330158 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.330225 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.330249 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.330779 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.331821 5014 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.349100 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.357492 5014 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.363622 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.363809 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.395042 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.413016 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431019 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431355 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431443 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431514 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431581 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431680 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431748 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431817 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431897 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.431966 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432038 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432126 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432176 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432197 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432296 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432317 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432337 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432354 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432376 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432396 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432417 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432434 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432454 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432474 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432492 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432510 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432528 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432549 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432571 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432601 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432619 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432638 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432656 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432670 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432684 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432700 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432717 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432731 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432748 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433013 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433215 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433474 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.432762 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433608 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433637 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433668 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433695 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433719 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433744 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433771 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433779 5014 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433799 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433903 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433931 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433953 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433970 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.433988 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434007 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434025 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434023 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod 
"49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434051 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434074 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434096 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434099 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434113 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434130 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434146 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434163 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434179 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 
10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434194 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434210 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434228 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434245 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434303 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434313 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434321 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434369 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434398 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434427 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434459 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434486 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434533 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434548 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434580 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434613 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434642 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434670 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434671 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434698 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434728 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434738 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434771 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434758 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434818 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434841 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434867 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434892 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434919 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434945 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434971 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434994 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435017 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435040 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435062 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435089 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435123 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435149 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435174 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435199 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435220 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435242 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435264 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435309 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435332 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435357 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435381 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435404 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435428 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435450 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435476 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435501 5014 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435525 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435547 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435573 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435596 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435621 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435645 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435673 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435697 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435723 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 
10:48:11.435748 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435771 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435794 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435818 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435844 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435912 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435966 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435993 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436019 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436044 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 10:48:11 crc 
kubenswrapper[5014]: I1205 10:48:11.436069 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436094 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436117 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436143 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436169 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436192 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436218 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436258 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436299 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436324 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod 
\"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436348 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436372 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436396 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436423 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436447 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436472 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436499 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436521 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436545 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436572 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436599 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436650 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.445674 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.445741 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446238 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446281 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446311 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446343 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446427 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446470 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446880 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447366 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447403 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447427 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447447 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447475 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447497 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447519 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447539 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447560 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447580 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447604 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447626 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447647 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447670 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447701 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447731 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447762 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447782 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447847 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.448506 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434837 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.459346 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.434992 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435067 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435069 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435072 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435064 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.459419 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435207 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435206 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435310 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435897 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435902 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435923 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.435923 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436014 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436088 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436093 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436138 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436333 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436375 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436380 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436395 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436454 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436593 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436617 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.436638 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.437819 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.437840 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.437814 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.438012 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.438041 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.438227 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.438349 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.438398 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.438430 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.438617 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.438633 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.438847 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.439040 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.439650 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.440097 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.440167 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.440195 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.440471 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.440617 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.440866 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.441096 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.441493 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.441563 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.441705 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.442147 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.442499 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.442870 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.443023 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.443025 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.443049 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.443346 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.443554 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.443825 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.459909 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.444090 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.444736 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.444745 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.444790 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.444868 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.445000 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.445198 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.445257 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.445341 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.445462 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.445471 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.442883 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446044 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446063 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446181 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446218 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.460177 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446453 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446534 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446792 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.446821 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447060 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.447224 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.448098 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.448469 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.448466 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.448735 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.448932 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.448765 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.449197 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.449174 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.448217 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.449323 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.449372 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.449478 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). 
InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.449804 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.449808 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.449799 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.449906 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.449943 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:48:11.949918331 +0000 UTC m=+18.898036035 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.458916 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.459227 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.459208 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.460107 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.460402 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.460785 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461236 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461329 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461260 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461448 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461558 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461615 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461658 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461721 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461868 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.461893 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.462027 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.462063 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.462061 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.462311 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.462341 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.462367 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.462516 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.462938 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463031 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463100 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463145 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463185 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463361 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463365 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463382 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463452 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463494 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463526 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463565 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463597 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463627 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463657 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463685 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463717 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463748 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463774 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463800 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463805 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463827 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463905 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463915 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463852 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.463976 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.464008 5014 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464023 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464059 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.464102 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:11.964073282 +0000 UTC m=+18.912191166 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464136 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464175 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464216 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464253 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464341 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464387 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464406 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464420 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464458 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.464672 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.465205 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.465133 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.465351 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.465456 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.465630 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.465722 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.465746 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.465844 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.465993 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.466392 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.466667 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.466687 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.467050 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.467247 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.467334 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.467356 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.467451 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.467521 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.468589 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.468796 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.469202 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.469347 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.469932 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.470534 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.471199 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.471438 5014 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.471520 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.471575 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:11.971556708 +0000 UTC m=+18.919674412 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.473132 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.473139 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.478397 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.478822 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.480187 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.480218 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.480233 5014 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480488 5014 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480537 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480554 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480569 5014 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480584 5014 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480599 5014 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480613 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480627 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480641 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480655 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480669 5014 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480686 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480702 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480717 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480729 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480742 5014 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480758 5014 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480773 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480786 5014 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480801 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480813 5014 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480826 5014 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480839 5014 reconciler_common.go:293] "Volume detached for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480855 5014 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480887 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480901 5014 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480913 5014 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480929 5014 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480942 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480955 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480968 5014 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480980 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.480992 5014 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481004 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481016 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481028 
5014 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481043 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481087 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481101 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481112 5014 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481124 5014 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481137 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481150 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481162 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481174 5014 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481184 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481205 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481217 5014 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481228 5014 reconciler_common.go:293] "Volume detached 
for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481242 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481256 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481282 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481295 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481307 5014 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481319 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481331 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481343 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481355 5014 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481366 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481380 5014 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481393 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481407 5014 
reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481417 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481429 5014 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481441 5014 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481453 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481466 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481479 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481490 5014 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481502 5014 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481514 5014 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481526 5014 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481539 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481552 5014 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481565 5014 reconciler_common.go:293] 
"Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481577 5014 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481589 5014 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481601 5014 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481613 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481627 5014 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481639 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481650 5014 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481661 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481672 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481684 5014 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481695 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481707 5014 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481719 5014 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481731 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481744 5014 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481760 5014 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481773 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481786 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481798 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481810 5014 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481823 5014 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481834 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481847 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481860 5014 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481872 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481885 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481898 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481910 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481924 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481940 5014 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481952 5014 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481968 5014 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481984 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.481997 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482010 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482026 5014 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482040 5014 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482052 5014 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482064 5014 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482078 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482091 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482107 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482122 5014 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482134 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482148 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482161 5014 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482173 5014 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482186 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482201 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482216 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482231 5014 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482245 5014 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482260 5014 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482292 5014 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482306 5014 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482319 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482332 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482345 5014 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482360 5014 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482374 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482387 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482401 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482415 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482428 5014 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482443 5014 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.482457 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.485668 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:11.985626927 +0000 UTC m=+18.933744631 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.485983 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.492072 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.492318 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.492342 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.492359 5014 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.492419 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:11.992398425 +0000 UTC m=+18.940516129 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.492755 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.493378 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.493587 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.495877 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.496466 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.496619 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.497425 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.498762 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.498940 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.501137 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.501202 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.504992 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.505371 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.508031 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.508352 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.508599 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.509165 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.509647 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.512311 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.512475 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.513466 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.513749 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.513866 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.515794 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.520782 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.523104 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.527010 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.548255 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.552720 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.563807 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.574394 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.578573 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584337 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584390 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584406 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584415 5014 reconciler_common.go:293] 
"Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584425 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584434 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584446 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584471 5014 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584480 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584491 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584512 5014 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584521 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584530 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584539 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584548 5014 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584558 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584567 5014 
reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584575 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584584 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584593 5014 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584601 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584612 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584620 5014 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584629 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584637 5014 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584646 5014 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584662 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584671 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584680 5014 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584688 5014 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584697 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584706 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584714 5014 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584722 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584731 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584745 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584754 5014 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584762 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584771 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584781 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584792 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584809 5014 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584843 5014 reconciler_common.go:293] "Volume detached for volume 
\"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584854 5014 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584862 5014 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584871 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584881 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584890 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584900 5014 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584910 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584919 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584927 5014 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584936 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.584945 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.585003 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.585627 5014 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.588743 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 10:48:11 crc kubenswrapper[5014]: W1205 10:48:11.591087 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-5409e3683d7061e0ba2c9e552b4ec9f0de61745a14e34baefbf4d94a5d218a37 WatchSource:0}: Error finding container 5409e3683d7061e0ba2c9e552b4ec9f0de61745a14e34baefbf4d94a5d218a37: Status 404 returned error can't find the container with id 5409e3683d7061e0ba2c9e552b4ec9f0de61745a14e34baefbf4d94a5d218a37 Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.592845 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.594846 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.612709 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.626749 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.641847 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.656467 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.673372 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.686765 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.695442 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-424mc"] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.695806 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-lkk2g"] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.696235 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.696498 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.696958 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-h6gwn"] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.697343 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-cvtv5"] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.697530 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-h6gwn" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.701994 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.702090 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.702160 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.702033 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.702629 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.703503 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.703968 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.704109 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.704123 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.705888 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.706168 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.706022 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.709139 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.709469 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.709704 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.710165 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.725441 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.744838 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.756666 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.768595 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.784825 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787014 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-run-multus-certs\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787094 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l445d\" (UniqueName: \"kubernetes.io/projected/f7894914-db6a-40a3-b46a-bf9e3a6b7fad-kube-api-access-l445d\") pod \"node-resolver-h6gwn\" (UID: \"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\") " pod="openshift-dns/node-resolver-h6gwn" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787131 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f8198e15-3b7a-4c40-b4b3-63382eba5846-cni-binary-copy\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787153 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-run-netns\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787177 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg4sq\" (UniqueName: \"kubernetes.io/projected/0c07b133-0b3c-4d10-95f9-23167e184681-kube-api-access-wg4sq\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787281 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0c07b133-0b3c-4d10-95f9-23167e184681-mcd-auth-proxy-config\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787336 5014 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-run-k8s-cni-cncf-io\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787363 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0c07b133-0b3c-4d10-95f9-23167e184681-proxy-tls\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787399 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-daemon-config\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787443 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw77t\" (UniqueName: \"kubernetes.io/projected/f8198e15-3b7a-4c40-b4b3-63382eba5846-kube-api-access-xw77t\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787467 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-var-lib-cni-multus\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787491 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-tuning-conf-dir\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787517 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-system-cni-dir\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787533 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqmht\" (UniqueName: \"kubernetes.io/projected/cc769555-0222-432b-bc44-d0d75873d48c-kube-api-access-sqmht\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787556 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-cni-dir\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " 
pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787572 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-etc-kubernetes\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787592 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-hostroot\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787609 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/cc769555-0222-432b-bc44-d0d75873d48c-cni-binary-copy\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787680 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f7894914-db6a-40a3-b46a-bf9e3a6b7fad-hosts-file\") pod \"node-resolver-h6gwn\" (UID: \"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\") " pod="openshift-dns/node-resolver-h6gwn" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787707 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-var-lib-cni-bin\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787728 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-cnibin\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787805 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/cc769555-0222-432b-bc44-d0d75873d48c-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787852 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-cnibin\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787875 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-socket-dir-parent\") pod \"multus-424mc\" (UID: 
\"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787897 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-var-lib-kubelet\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787962 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-conf-dir\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.787986 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-os-release\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.788009 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0c07b133-0b3c-4d10-95f9-23167e184681-rootfs\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.788033 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-system-cni-dir\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.788061 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-os-release\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.797472 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ebceb88daec465ba1e998e5d33654dd8387eef7ca574c1932ed58d66eb9faf76"} Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.800321 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"5409e3683d7061e0ba2c9e552b4ec9f0de61745a14e34baefbf4d94a5d218a37"} Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.801586 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.803351 5014 generic.go:334] "Generic (PLEG): container finished" 
podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc" exitCode=255 Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.803425 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc"} Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.805161 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.806108 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"94af0702be752fa4363a96afc86686cec2bf282347c4ab7a8921e6c9e76e29df"} Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.818433 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.818550 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.818849 5014 scope.go:117] "RemoveContainer" containerID="c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.834465 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.848197 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.860123 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.872823 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.887603 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889410 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-var-lib-cni-multus\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889449 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-tuning-conf-dir\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889476 5014 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-system-cni-dir\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889500 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqmht\" (UniqueName: \"kubernetes.io/projected/cc769555-0222-432b-bc44-d0d75873d48c-kube-api-access-sqmht\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889522 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-etc-kubernetes\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889544 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-cni-dir\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889574 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-hostroot\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889604 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/cc769555-0222-432b-bc44-d0d75873d48c-cni-binary-copy\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889629 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f7894914-db6a-40a3-b46a-bf9e3a6b7fad-hosts-file\") pod \"node-resolver-h6gwn\" (UID: \"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\") " pod="openshift-dns/node-resolver-h6gwn" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889653 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-var-lib-cni-bin\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889703 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-cnibin\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889732 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-var-lib-kubelet\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889756 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-conf-dir\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889783 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/cc769555-0222-432b-bc44-d0d75873d48c-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889782 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-tuning-conf-dir\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889857 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-cnibin\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.889873 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-var-lib-cni-multus\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890441 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-socket-dir-parent\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890621 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-socket-dir-parent\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890640 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-os-release\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890660 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-cnibin\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " 
pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890682 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0c07b133-0b3c-4d10-95f9-23167e184681-rootfs\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890749 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-system-cni-dir\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890785 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-os-release\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890817 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-run-multus-certs\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890848 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l445d\" (UniqueName: \"kubernetes.io/projected/f7894914-db6a-40a3-b46a-bf9e3a6b7fad-kube-api-access-l445d\") pod \"node-resolver-h6gwn\" (UID: \"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\") " pod="openshift-dns/node-resolver-h6gwn" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890871 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-run-netns\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.890892 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg4sq\" (UniqueName: \"kubernetes.io/projected/0c07b133-0b3c-4d10-95f9-23167e184681-kube-api-access-wg4sq\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891020 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f8198e15-3b7a-4c40-b4b3-63382eba5846-cni-binary-copy\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891045 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0c07b133-0b3c-4d10-95f9-23167e184681-mcd-auth-proxy-config\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891072 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-run-k8s-cni-cncf-io\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891094 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0c07b133-0b3c-4d10-95f9-23167e184681-proxy-tls\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891131 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-daemon-config\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891135 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-os-release\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891155 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw77t\" (UniqueName: \"kubernetes.io/projected/f8198e15-3b7a-4c40-b4b3-63382eba5846-kube-api-access-xw77t\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891287 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/cc769555-0222-432b-bc44-d0d75873d48c-cni-binary-copy\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891329 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-system-cni-dir\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891361 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-cni-dir\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891373 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-etc-kubernetes\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891370 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-run-k8s-cni-cncf-io\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891416 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-hostroot\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891448 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f7894914-db6a-40a3-b46a-bf9e3a6b7fad-hosts-file\") pod \"node-resolver-h6gwn\" (UID: \"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\") " pod="openshift-dns/node-resolver-h6gwn" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891468 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-os-release\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891486 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-var-lib-cni-bin\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891475 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-run-netns\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891516 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-conf-dir\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891483 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-run-multus-certs\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891504 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0c07b133-0b3c-4d10-95f9-23167e184681-rootfs\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891564 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/cc769555-0222-432b-bc44-d0d75873d48c-system-cni-dir\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " 
pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891604 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-host-var-lib-kubelet\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.891636 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f8198e15-3b7a-4c40-b4b3-63382eba5846-cnibin\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.892109 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/cc769555-0222-432b-bc44-d0d75873d48c-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.892255 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f8198e15-3b7a-4c40-b4b3-63382eba5846-cni-binary-copy\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.892365 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f8198e15-3b7a-4c40-b4b3-63382eba5846-multus-daemon-config\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.892468 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0c07b133-0b3c-4d10-95f9-23167e184681-mcd-auth-proxy-config\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.898681 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0c07b133-0b3c-4d10-95f9-23167e184681-proxy-tls\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.902678 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.914983 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.915596 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw77t\" (UniqueName: \"kubernetes.io/projected/f8198e15-3b7a-4c40-b4b3-63382eba5846-kube-api-access-xw77t\") pod \"multus-424mc\" (UID: \"f8198e15-3b7a-4c40-b4b3-63382eba5846\") " pod="openshift-multus/multus-424mc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.915841 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg4sq\" (UniqueName: \"kubernetes.io/projected/0c07b133-0b3c-4d10-95f9-23167e184681-kube-api-access-wg4sq\") pod \"machine-config-daemon-cvtv5\" (UID: \"0c07b133-0b3c-4d10-95f9-23167e184681\") " pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.919639 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l445d\" (UniqueName: \"kubernetes.io/projected/f7894914-db6a-40a3-b46a-bf9e3a6b7fad-kube-api-access-l445d\") pod \"node-resolver-h6gwn\" (UID: \"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\") " pod="openshift-dns/node-resolver-h6gwn" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.921844 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqmht\" (UniqueName: \"kubernetes.io/projected/cc769555-0222-432b-bc44-d0d75873d48c-kube-api-access-sqmht\") pod \"multus-additional-cni-plugins-lkk2g\" (UID: \"cc769555-0222-432b-bc44-d0d75873d48c\") " pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.926902 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.928026 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.939307 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.955251 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.992026 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.992143 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.992178 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.992304 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.992323 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.992320 5014 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:48:12.992241122 +0000 UTC m=+19.940358826 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.992335 5014 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.992381 5014 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.992445 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:12.992417156 +0000 UTC m=+19.940534860 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.992466 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:12.992458717 +0000 UTC m=+19.940576421 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:11 crc kubenswrapper[5014]: I1205 10:48:11.992505 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.992652 5014 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:11 crc kubenswrapper[5014]: E1205 10:48:11.992686 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:12.992678932 +0000 UTC m=+19.940796626 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.000499 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.011658 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.023199 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-424mc" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.041285 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.044549 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/
openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" 
not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.047025 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-h6gwn" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.057172 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.065146 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: W1205 10:48:12.078829 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7894914_db6a_40a3_b46a_bf9e3a6b7fad.slice/crio-b533a7c28f96bc679073abf1b2549a06f1304bb2463318feb5ad35d4ae7b8b73 WatchSource:0}: Error finding container b533a7c28f96bc679073abf1b2549a06f1304bb2463318feb5ad35d4ae7b8b73: Status 404 returned error can't find the container with id b533a7c28f96bc679073abf1b2549a06f1304bb2463318feb5ad35d4ae7b8b73 Dec 05 10:48:12 crc kubenswrapper[5014]: W1205 10:48:12.086945 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc769555_0222_432b_bc44_d0d75873d48c.slice/crio-6060ade66a8fdb267b83621df840cb3a482e7333f042a8d950b1e2d3cddcb906 WatchSource:0}: Error finding container 6060ade66a8fdb267b83621df840cb3a482e7333f042a8d950b1e2d3cddcb906: Status 404 returned error can't find the container with id 6060ade66a8fdb267b83621df840cb3a482e7333f042a8d950b1e2d3cddcb906 Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.092679 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.092969 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-znfbl"] Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.093052 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:12 crc kubenswrapper[5014]: E1205 10:48:12.093224 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:12 crc kubenswrapper[5014]: E1205 10:48:12.093239 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:12 crc kubenswrapper[5014]: E1205 10:48:12.093251 5014 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:12 crc kubenswrapper[5014]: E1205 10:48:12.093305 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:13.093292456 +0000 UTC m=+20.041410160 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.093988 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.098555 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.098619 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.098754 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.098865 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.099747 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.099950 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.100100 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.113601 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.131737 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.150787 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.177853 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.193672 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-node-log\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.193711 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-netd\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.193736 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-ovn\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.193756 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-kubelet\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.193772 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-log-socket\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.193880 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-etc-openvswitch\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.193932 5014 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-ovn-kubernetes\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194187 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovn-node-metrics-cert\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194217 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-env-overrides\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194243 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-openvswitch\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194283 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqw9s\" (UniqueName: \"kubernetes.io/projected/41fb1a99-1c51-4281-b73f-8a29357a0a2c-kube-api-access-gqw9s\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194327 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-script-lib\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194426 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194452 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-systemd\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194472 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-var-lib-openvswitch\") pod \"ovnkube-node-znfbl\" (UID: 
\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194491 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-bin\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194518 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-slash\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194551 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-netns\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194603 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-systemd-units\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.194622 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-config\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.207610 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.230631 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.248916 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.271695 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.293401 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.298807 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqw9s\" (UniqueName: \"kubernetes.io/projected/41fb1a99-1c51-4281-b73f-8a29357a0a2c-kube-api-access-gqw9s\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 
10:48:12.298890 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-script-lib\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299416 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299450 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-systemd\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299496 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-var-lib-openvswitch\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299512 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-bin\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299528 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-slash\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299545 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-netns\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299607 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-systemd-units\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299675 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-config\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299756 5014 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-node-log\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299778 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-netd\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299847 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-ovn\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299870 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-kubelet\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299914 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-log-socket\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299934 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-etc-openvswitch\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299949 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-ovn-kubernetes\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299966 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovn-node-metrics-cert\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299983 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-env-overrides\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.299999 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-openvswitch\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300042 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-openvswitch\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300067 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-script-lib\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300096 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-systemd\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300077 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300122 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-var-lib-openvswitch\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300145 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-netd\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300151 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-ovn\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300176 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-kubelet\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300177 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-bin\") pod \"ovnkube-node-znfbl\" (UID: 
\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300198 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-slash\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300211 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-log-socket\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300223 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-netns\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300236 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-etc-openvswitch\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300249 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-systemd-units\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300289 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-ovn-kubernetes\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300529 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-node-log\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.300823 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-config\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.301085 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-env-overrides\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.310130 5014 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.316890 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovn-node-metrics-cert\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.320901 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqw9s\" (UniqueName: \"kubernetes.io/projected/41fb1a99-1c51-4281-b73f-8a29357a0a2c-kube-api-access-gqw9s\") pod \"ovnkube-node-znfbl\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.327160 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.354631 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.372167 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is 
after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.403414 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.407292 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.465444 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 
05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.517620 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.555829 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.593340 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.810064 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.812339 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.812506 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.816111 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.816166 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.817534 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.818750 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerStarted","Data":"9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.818788 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerStarted","Data":"6060ade66a8fdb267b83621df840cb3a482e7333f042a8d950b1e2d3cddcb906"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.820466 5014 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-424mc" event={"ID":"f8198e15-3b7a-4c40-b4b3-63382eba5846","Type":"ContainerStarted","Data":"0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.820493 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-424mc" event={"ID":"f8198e15-3b7a-4c40-b4b3-63382eba5846","Type":"ContainerStarted","Data":"95b7ae7c780164453576ebc14b06087a2c9e04b0aa762f54fcd64c26d2825a6e"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.821644 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f" exitCode=0 Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.821696 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.821713 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"6775976b158719b980925d5d8db944015a6a842e88f312654e326870a8481b0d"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.823281 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.823316 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.823326 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"7318d403402e5d00ce4c2196d283b4d043e7c4cb8695bee8f84e6781a8243545"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.824482 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-h6gwn" event={"ID":"f7894914-db6a-40a3-b46a-bf9e3a6b7fad","Type":"ContainerStarted","Data":"7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.824503 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-h6gwn" event={"ID":"f7894914-db6a-40a3-b46a-bf9e3a6b7fad","Type":"ContainerStarted","Data":"b533a7c28f96bc679073abf1b2549a06f1304bb2463318feb5ad35d4ae7b8b73"} Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.841416 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.859120 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.872648 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.892302 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.910000 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.927169 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.956654 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:12 crc kubenswrapper[5014]: I1205 10:48:12.982437 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:12Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.008356 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.008472 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.008512 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.008536 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.008522 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.008573 5014 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.008650 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:15.008628946 +0000 UTC m=+21.956746640 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.008657 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.008674 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.008708 5014 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.008756 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:15.008741538 +0000 UTC m=+21.956859242 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.008765 5014 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.008887 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:15.008857421 +0000 UTC m=+21.956975295 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.009000 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:48:15.008985854 +0000 UTC m=+21.957103558 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.064097 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},
{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.086971 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 
10:48:13.109262 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.109425 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.109453 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.109469 5014 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.109533 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:15.109514926 +0000 UTC m=+22.057632630 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.121623 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.137324 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.157372 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.179109 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.182301 5014 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.182978 5014 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ovn-kubernetes/events\": read tcp 38.129.56.110:33014->38.129.56.110:6443: use of closed network connection" event="&Event{ObjectMeta:{ovnkube-node-znfbl.187e4c0620bf784c openshift-ovn-kubernetes 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-ovn-kubernetes,Name:ovnkube-node-znfbl,UID:41fb1a99-1c51-4281-b73f-8a29357a0a2c,APIVersion:v1,ResourceVersion:26698,FieldPath:spec.containers{ovn-acl-logging},},Reason:Created,Message:Created container ovn-acl-logging,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 10:48:13.178746956 +0000 UTC m=+20.126864660,LastTimestamp:2025-12-05 10:48:13.178746956 +0000 UTC m=+20.126864660,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.320490 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.320583 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.320679 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.320703 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.320830 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.320942 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.328962 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.329908 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.331340 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.332170 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.333461 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.334141 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.334988 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.336163 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.336929 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.338021 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.338664 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.340018 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.340728 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.341361 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.346996 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.347638 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.349022 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.349606 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.350257 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.351617 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.352172 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.353349 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.353839 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.360817 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.361781 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.362902 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.364777 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.365491 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.366729 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.367489 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.368575 5014 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.368705 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.373944 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.378822 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.379833 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.394439 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.399724 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.400752 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.402523 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.403463 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.405302 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.406092 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.408814 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.409646 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.410715 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.411403 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.413080 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.414155 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.415435 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.416092 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.416681 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.418372 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.419081 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.421474 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.829139 5014 generic.go:334] "Generic (PLEG): container finished" podID="cc769555-0222-432b-bc44-d0d75873d48c" containerID="9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13" exitCode=0 Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.829597 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" 
event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerDied","Data":"9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13"} Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.832708 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.832748 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.832758 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.925221 5014 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.927922 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.927969 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.927992 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.928113 5014 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.938070 5014 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.938454 5014 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.939821 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.939863 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.939875 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.939893 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.939906 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:13Z","lastTransitionTime":"2025-12-05T10:48:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:13 crc kubenswrapper[5014]: E1205 10:48:13.986255 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.994865 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.994900 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.994913 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.994928 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:13 crc kubenswrapper[5014]: I1205 10:48:13.994938 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:13Z","lastTransitionTime":"2025-12-05T10:48:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: E1205 10:48:14.015995 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.021549 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.021609 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.021624 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.021645 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.021663 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: E1205 10:48:14.041098 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.046915 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.046957 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.046966 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.046982 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.046991 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: E1205 10:48:14.067422 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.071229 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.071281 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.071296 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.071313 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.071324 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: E1205 10:48:14.091031 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: E1205 10:48:14.091201 5014 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.092738 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.092764 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.092774 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.092791 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.092807 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.195461 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.195499 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.195521 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.195543 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.195555 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.201051 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.222977 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.242385 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.266291 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.283817 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.300182 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.305474 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.305671 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.305765 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.305861 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.305971 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.322067 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.347435 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.361932 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.381019 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.404552 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z 
is after 2025-08-24T17:21:41Z"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.408473 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.408522 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.408538 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.408560 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.408605 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.436341 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z 
is after 2025-08-24T17:21:41Z"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.452078 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z"
Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.468811 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.483889 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.499979 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"
host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.511063 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.511139 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.511150 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.511166 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.511178 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.517417 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.533892 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.546546 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.559712 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.574822 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.600254 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.613926 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.613968 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.613980 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.613997 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.614007 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.619686 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.645589 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.661319 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.680336 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.698746 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc 
kubenswrapper[5014]: I1205 10:48:14.717049 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.717099 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.717111 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.717130 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.717150 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.720209 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z 
is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.739454 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.756616 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.777006 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.789010 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.808817 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.820593 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.820637 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.820648 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.820675 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.820691 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.827401 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.837284 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerStarted","Data":"7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.845780 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.845828 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.845838 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.846832 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.860157 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.873973 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.890812 5014 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.904392 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.926839 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.926910 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.926923 5014 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.926944 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.926956 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:14Z","lastTransitionTime":"2025-12-05T10:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.939242 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:14 crc kubenswrapper[5014]: I1205 10:48:14.992042 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev
/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\
\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.029994 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.030048 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.030064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.030084 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.030097 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.031214 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.040235 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.040362 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.040437 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:48:19.040404409 +0000 UTC m=+25.988522113 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.040494 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.040503 5014 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.040553 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.040594 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-05 10:48:19.040574673 +0000 UTC m=+25.988692377 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.040654 5014 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.040722 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.040728 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:19.040709676 +0000 UTC m=+25.988827570 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.040735 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.040748 5014 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.040791 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:19.040784668 +0000 UTC m=+25.988902362 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.075982 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.094495 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.115322 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.129664 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.134307 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.134356 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.134365 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.134384 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.134400 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.141558 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.141777 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.141812 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.141826 5014 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.141895 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:19.141873313 +0000 UTC m=+26.089991217 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.145956 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287fa
af92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.160932 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.172435 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.184525 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.236484 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.236526 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.236536 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.236553 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.236562 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.317366 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.317395 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.317455 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.317500 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
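
Every "Failed to update status for pod" entry above ends with the same root cause: the apiserver cannot call the pod.network-node-identity.openshift.io admission webhook on 127.0.0.1:9743 because the webhook's serving certificate expired on 2025-08-24T17:21:41Z, months before the node's current clock (2025-12-05). The final error string is Go's crypto/x509 validity-window failure. A minimal standalone sketch of that same check, assuming a PEM-encoded certificate at a hypothetical path:

    // certcheck.go - reproduce the crypto/x509 validity-window check that the
    // webhook calls above are failing. The certificate path is hypothetical;
    // point it at any PEM-encoded certificate.
    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        data, err := os.ReadFile("/tmp/webhook-serving.crt") // hypothetical path
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            fmt.Fprintln(os.Stderr, "no PEM block found")
            os.Exit(1)
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        // The same window test x509 verification applies; when the clock is past
        // NotAfter it reports "certificate has expired or is not yet valid".
        now := time.Now()
        switch {
        case now.After(cert.NotAfter):
            fmt.Printf("expired: current time %s is after %s\n",
                now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        case now.Before(cert.NotBefore):
            fmt.Printf("not yet valid: current time %s is before %s\n",
                now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
        default:
            fmt.Println("certificate is within its validity window")
        }
    }

Until that certificate is rotated (or the node clock corrected), every status PATCH the kubelet sends will keep failing with this webhook error, which is why the identical failure repeats for pod after pod below.
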
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.317577 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:15 crc kubenswrapper[5014]: E1205 10:48:15.317677 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.323035 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.338897 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.340149 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.340196 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.340239 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.340259 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.340288 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.340739 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.342390 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.353919 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.355462 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-sk4qz"] Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.355814 5014 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.358972 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.359094 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.359154 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.359194 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.369547 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.385111 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.424815 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.442977 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.443013 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.443025 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.443042 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.443056 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.444436 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285-host\") pod \"node-ca-sk4qz\" (UID: \"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\") " pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.444508 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvxsn\" (UniqueName: \"kubernetes.io/projected/0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285-kube-api-access-zvxsn\") pod \"node-ca-sk4qz\" (UID: \"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\") " pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.444548 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285-serviceca\") pod \"node-ca-sk4qz\" (UID: \"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\") " pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.469457 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
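
The repeated NodeNotReady flips above all hinge on one probe: the container runtime reports NetworkReady=false until a CNI configuration file shows up in /etc/kubernetes/cni/net.d/, which on this node happens only once multus and ovn-kubernetes finish starting. A rough sketch of such a directory probe follows; the extension list is an assumption based on common CNI conventions, not a copy of the kubelet's exact logic:

    // cnicheck.go - probe the CNI conf dir named in the NetworkPluginNotReady
    // message above. The extensions checked are the usual CNI ones (assumed).
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
        entries, err := os.ReadDir(confDir)
        if err != nil {
            fmt.Println("not ready:", err)
            return
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                fmt.Println("ready: found", filepath.Join(confDir, e.Name()))
                return
            }
        }
        fmt.Printf("not ready: no CNI configuration file in %s\n", confDir)
    }
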
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.506093 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.545331 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285-host\") pod \"node-ca-sk4qz\" (UID: \"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\") " pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.545769 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvxsn\" (UniqueName: \"kubernetes.io/projected/0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285-kube-api-access-zvxsn\") pod \"node-ca-sk4qz\" (UID: \"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\") " pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.545815 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285-serviceca\") pod \"node-ca-sk4qz\" (UID: \"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\") " pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.545528 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285-host\") pod \"node-ca-sk4qz\" (UID: \"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\") " pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.546774 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285-serviceca\") pod \"node-ca-sk4qz\" (UID: \"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\") " pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.547461 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.547492 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.547501 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 
10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.547515 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.547526 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.547610 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMoun
ts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 
UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.575084 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvxsn\" (UniqueName: \"kubernetes.io/projected/0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285-kube-api-access-zvxsn\") pod \"node-ca-sk4qz\" (UID: \"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\") " pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.607223 5014 status_manager.go:875] "Failed to update status for pod" 
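
The patch bodies in these entries are hard to read because they appear to be Go-quoted twice: once when the kubelet embeds the patch in the error string, and once more when the structured err field is rendered into the journal. A small helper that peels the quoting off and pretty-prints the JSON; paste the payload between the outer \" ... \" on stdin, and treat the two-level assumption as exactly that, an assumption:

    // patchdump.go - pretty-print one of the escaped status-patch payloads
    // from this log. Unquotes repeatedly until the input parses as JSON.
    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "io"
        "os"
        "strconv"
        "strings"
    )

    func main() {
        raw, _ := io.ReadAll(os.Stdin)
        s := strings.TrimSpace(string(raw))
        for i := 0; i < 4; i++ { // at most a few levels of quoting
            var out bytes.Buffer
            if json.Indent(&out, []byte(s), "", "  ") == nil {
                fmt.Println(out.String())
                return
            }
            // Reverse one level of Go string escaping (\" -> ", \\ -> \, \n ...).
            u, err := strconv.Unquote(`"` + strings.Trim(s, `"`) + `"`)
            if err != nil {
                fmt.Fprintln(os.Stderr, "could not unescape input:", err)
                os.Exit(1)
            }
            s = u
        }
        fmt.Fprintln(os.Stderr, "input never became valid JSON")
        os.Exit(1)
    }

Run against the kube-apiserver-crc entry above, this also makes the embedded container log readable, including the terminal "F1205 10:48:11.244228 ... pods \"kube-apiserver-crc\" not found" line that explains the check-endpoints restart.
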
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407
aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.648147 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.649478 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.649527 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.649538 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.649556 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.649567 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.668649 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-sk4qz" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.695668 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z 
is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.728834 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.751676 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.751713 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.751722 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.751739 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.751751 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.767081 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.805168 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.846757 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.851445 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.852940 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.853000 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.853009 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.853021 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.853031 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.853681 5014 generic.go:334] "Generic (PLEG): container finished" podID="cc769555-0222-432b-bc44-d0d75873d48c" containerID="7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade" exitCode=0 Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.853737 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerDied","Data":"7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.855436 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-sk4qz" event={"ID":"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285","Type":"ContainerStarted","Data":"830286cf2dbf345f1d29aa860ac15116c77a0d4b7a95fc06c9986a21f51517ce"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.888128 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.926900 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.956409 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.956476 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.956490 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.956513 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.956533 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:15Z","lastTransitionTime":"2025-12-05T10:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:15 crc kubenswrapper[5014]: I1205 10:48:15.965236 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.005352 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.047909 5014 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.059057 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.059098 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.059108 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.059124 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.059135 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.090567 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.128094 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.161404 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.161440 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.161450 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.161467 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.161478 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.170195 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entry
point\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.212147 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z 
is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.252303 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.263750 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.263786 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.263800 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.263819 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.263830 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.290613 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.327656 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.366951 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.367009 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.367020 5014 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.367040 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.367052 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.369902 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.405406 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.448910 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\
"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.470010 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.470046 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.470058 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.470073 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.470083 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.486442 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.525297 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.565160 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.572677 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 
crc kubenswrapper[5014]: I1205 10:48:16.572704 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.572713 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.572726 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.572736 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.606370 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"na
me\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.653248 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.674832 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.674873 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.674882 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.674898 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.674910 
5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.685511 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.725504 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.767591 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.776423 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.776461 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.776470 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.776487 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.776504 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.811530 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z 
is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.844551 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.861959 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.863564 5014 generic.go:334] "Generic (PLEG): container finished" podID="cc769555-0222-432b-bc44-d0d75873d48c" containerID="5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0" exitCode=0 Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.863603 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerDied","Data":"5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.865876 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-sk4qz" event={"ID":"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285","Type":"ContainerStarted","Data":"60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.878735 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.878768 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.878780 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.878799 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.878813 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.892291 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.926771 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.965399 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:16Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.981073 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.981110 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.981120 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.981136 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:16 crc kubenswrapper[5014]: I1205 10:48:16.981148 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:16Z","lastTransitionTime":"2025-12-05T10:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.009047 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.046652 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.084961 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.085007 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.085018 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.085037 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.085053 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:17Z","lastTransitionTime":"2025-12-05T10:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.086034 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.125002 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.167960 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.187678 5014 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.187726 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.187741 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.187761 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.187775 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:17Z","lastTransitionTime":"2025-12-05T10:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.204470 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.247760 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.286924 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.291247 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.291347 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.291362 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.291377 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.291388 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:17Z","lastTransitionTime":"2025-12-05T10:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.317540 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.317552 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:17 crc kubenswrapper[5014]: E1205 10:48:17.317685 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:17 crc kubenswrapper[5014]: E1205 10:48:17.317821 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.317886 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:17 crc kubenswrapper[5014]: E1205 10:48:17.317946 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.327031 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.371233 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z 
is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.399491 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.399541 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.399551 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.399569 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.399579 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:17Z","lastTransitionTime":"2025-12-05T10:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.415331 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.448538 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.485437 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.501422 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.501451 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.501459 5014 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.501473 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.501482 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:17Z","lastTransitionTime":"2025-12-05T10:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.528186 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.566764 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"
/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.603503 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.603560 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.603569 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.603583 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.603593 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:17Z","lastTransitionTime":"2025-12-05T10:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.706686 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.706739 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.706755 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.706775 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.706788 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:17Z","lastTransitionTime":"2025-12-05T10:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.809114 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.809153 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.809164 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.809178 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.809188 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:17Z","lastTransitionTime":"2025-12-05T10:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.873149 5014 generic.go:334] "Generic (PLEG): container finished" podID="cc769555-0222-432b-bc44-d0d75873d48c" containerID="bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782" exitCode=0 Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.873223 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerDied","Data":"bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.889575 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.908766 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.911904 5014 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.912105 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.912250 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.912374 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.912503 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:17Z","lastTransitionTime":"2025-12-05T10:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.920258 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.941547 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.962311 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.978159 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:17 crc kubenswrapper[5014]: I1205 10:48:17.997911 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:17Z 
is after 2025-08-24T17:21:41Z" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.015214 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.015259 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.015292 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.015310 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.015322 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.017477 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:18Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.030118 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:18Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.041102 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:18Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.057248 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:18Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.071066 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:18Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.086621 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:18Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.117767 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.117815 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.117824 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.117838 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.117848 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.126260 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:18Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.165360 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T10:48:18Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.220026 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.220371 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.220456 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.220534 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.220651 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.323391 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.323727 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.323799 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.323870 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.323933 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.426516 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.426560 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.426575 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.426590 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.426600 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.529769 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.529853 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.529873 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.529898 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.529918 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.633728 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.633784 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.633802 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.633828 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.633843 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.737215 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.737260 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.737286 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.737312 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.737323 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.839773 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.839820 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.839829 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.839844 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.839857 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.954544 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.954604 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.954616 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.954637 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:18 crc kubenswrapper[5014]: I1205 10:48:18.954653 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:18Z","lastTransitionTime":"2025-12-05T10:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.066742 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.067145 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.067160 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.067180 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.067192 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:19Z","lastTransitionTime":"2025-12-05T10:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.082362 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.082577 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:48:27.082552077 +0000 UTC m=+34.030669791 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.082861 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.082982 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.083139 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.083037 5014 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.083441 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:27.083431307 +0000 UTC m=+34.031549011 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.083113 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.083222 5014 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.083688 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-05 10:48:27.083675812 +0000 UTC m=+34.031793516 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.083845 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.083905 5014 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.084102 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:27.084089863 +0000 UTC m=+34.032207567 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.171097 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.171147 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.171157 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.171174 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.171185 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:19Z","lastTransitionTime":"2025-12-05T10:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.184568 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.184755 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.184783 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.184801 5014 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.184854 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:27.18483614 +0000 UTC m=+34.132953854 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.273253 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.273311 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.273324 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.273341 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.273352 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:19Z","lastTransitionTime":"2025-12-05T10:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.320096 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.320213 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.320304 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.320352 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.320392 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:19 crc kubenswrapper[5014]: E1205 10:48:19.320431 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.375515 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.375546 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.375555 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.375568 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.375580 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:19Z","lastTransitionTime":"2025-12-05T10:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.480532 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.480570 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.480585 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.480603 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.480615 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:19Z","lastTransitionTime":"2025-12-05T10:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.584135 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.584185 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.584200 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.584244 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.584263 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:19Z","lastTransitionTime":"2025-12-05T10:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.686919 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.687025 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.687039 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.687065 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.687079 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:19Z","lastTransitionTime":"2025-12-05T10:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.790241 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.790305 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.790325 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.790342 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.790354 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:19Z","lastTransitionTime":"2025-12-05T10:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.896616 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.896955 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.897028 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.897700 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.897740 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.897757 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.897779 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.897794 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:19Z","lastTransitionTime":"2025-12-05T10:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.901755 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerStarted","Data":"6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad"} Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.916355 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b
90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:19Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.932024 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.933258 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:19Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.948079 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:19Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.966416 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:19Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.985260 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b5050
11d4295ebc8a2c0c93fa4612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:19Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:19 crc kubenswrapper[5014]: I1205 10:48:19.999515 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:19Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.000434 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.000461 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.000470 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.000483 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.000493 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.013444 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.031815 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.044687 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.059559 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.073581 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.096786 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.103849 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.103886 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.103898 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.103915 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.103925 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.118463 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.137968 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.150876 5014 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.169619 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.192662 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.203632 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.208466 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.208505 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.208518 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.208534 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.208544 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.218580 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.229188 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11
\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.242953 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{
\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 
genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.254198 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.265045 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.279470 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.303309 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.310884 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.310932 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.310945 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.310976 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.310991 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.318090 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.336174 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.357075 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.405432 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.413524 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.413565 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.413576 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.413594 5014 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.413606 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.432463 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.516022 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.516065 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.516078 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.516098 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.516111 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.618657 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.618926 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.619034 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.619115 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.619222 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.721894 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.722139 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.722240 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.722332 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.722393 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.824904 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.825314 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.825382 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.825471 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.825567 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.908213 5014 generic.go:334] "Generic (PLEG): container finished" podID="cc769555-0222-432b-bc44-d0d75873d48c" containerID="6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad" exitCode=0 Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.908325 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerDied","Data":"6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.909107 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.928743 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.929946 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.929982 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.929995 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.930011 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.930023 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:20Z","lastTransitionTime":"2025-12-05T10:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.948690 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.952014 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.962878 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.978708 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:20 crc kubenswrapper[5014]: I1205 10:48:20.992938 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\
\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:20Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.002816 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.016164 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.029072 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.033546 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.033599 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.033612 5014 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.033635 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.033650 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.047833 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.072650 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b5050
11d4295ebc8a2c0c93fa4612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.098909 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.119735 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/mul
tus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.134876 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.137393 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.137419 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.137429 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.137447 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.137458 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.148177 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.163154 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.176732 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.198846 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.223801 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.240501 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.240819 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.240836 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.240846 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.240859 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.240868 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.256342 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.270486 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.288833 5014 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.305745 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.317413 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:21 crc kubenswrapper[5014]: E1205 10:48:21.317533 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.317837 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:21 crc kubenswrapper[5014]: E1205 10:48:21.317898 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.317941 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:21 crc kubenswrapper[5014]: E1205 10:48:21.317982 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.324531 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.341846 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.343751 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.343789 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.343798 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.343817 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.343827 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.361707 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12
-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/
\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswi
tch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.420392 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff
5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.439298 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.446145 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.446190 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.446202 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.446224 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.446235 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.458008 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.479544 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.548501 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.548544 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.548559 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.548575 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.548586 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.651144 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.651192 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.651207 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.651227 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.651239 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.754589 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.754632 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.754644 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.754662 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.754674 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.857140 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.857193 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.857206 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.857223 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.857237 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.915152 5014 generic.go:334] "Generic (PLEG): container finished" podID="cc769555-0222-432b-bc44-d0d75873d48c" containerID="b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867" exitCode=0 Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.915210 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerDied","Data":"b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.941003 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9
ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.956166 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.965463 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.965515 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.965526 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.965542 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.965553 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:21Z","lastTransitionTime":"2025-12-05T10:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.978168 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:21 crc kubenswrapper[5014]: I1205 10:48:21.996381 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:21Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.022045 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.035925 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.052629 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.066863 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.068197 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.068236 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.068247 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.068264 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.068295 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.079202 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.091381 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.103738 5014 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.120096 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.134517 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.149126 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.162893 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.170740 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 
crc kubenswrapper[5014]: I1205 10:48:22.170773 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.170782 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.170799 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.170811 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.272701 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.272754 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.272764 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.272781 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.272793 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.375546 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.375604 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.375614 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.375630 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.375643 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.478593 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.478639 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.478650 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.478667 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.478678 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.580862 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.580906 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.580915 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.580930 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.580944 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.683142 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.683191 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.683205 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.683221 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.683232 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.785513 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.785558 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.785574 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.785595 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.785611 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.888389 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.888428 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.888437 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.888454 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.888464 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.920507 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/0.log" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.924358 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612" exitCode=1 Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.924445 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.925407 5014 scope.go:117] "RemoveContainer" containerID="7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.929462 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" event={"ID":"cc769555-0222-432b-bc44-d0d75873d48c","Type":"ContainerStarted","Data":"28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.954306 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6
877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",
\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.970031 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.990822 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.990860 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.990874 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.990891 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.990901 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:22Z","lastTransitionTime":"2025-12-05T10:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:22 crc kubenswrapper[5014]: I1205 10:48:22.991549 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:22Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.015356 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.046052 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"message\\\":\\\"] [zone-nad-controller NAD controller]: shutting down\\\\nI1205 10:48:22.171818 6239 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 10:48:22.171830 6239 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 10:48:22.171844 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 10:48:22.171849 6239 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 10:48:22.171866 6239 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 10:48:22.171869 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 10:48:22.171912 6239 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:48:22.171917 6239 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 10:48:22.171975 6239 factory.go:656] Stopping watch factory\\\\nI1205 10:48:22.171990 6239 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 10:48:22.171997 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 10:48:22.172003 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 10:48:22.172009 6239 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:48:22.172110 6239 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.059871 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\
"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.074861 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.090246 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.093507 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.093558 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.093573 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.093593 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.093606 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:23Z","lastTransitionTime":"2025-12-05T10:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.106077 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.122628 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.136288 5014 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.151642 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.165878 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.177753 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.188931 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.195768 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:23 
crc kubenswrapper[5014]: I1205 10:48:23.195813 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.195824 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.195841 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.195850 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:23Z","lastTransitionTime":"2025-12-05T10:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.200351 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.211500 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.226100 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.240781 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.254257 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.269666 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.281598 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.293164 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.297722 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.297764 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.297772 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.297790 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.297802 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:23Z","lastTransitionTime":"2025-12-05T10:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.307217 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.317874 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.317946 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:23 crc kubenswrapper[5014]: E1205 10:48:23.318002 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.317945 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:23 crc kubenswrapper[5014]: E1205 10:48:23.318100 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:23 crc kubenswrapper[5014]: E1205 10:48:23.318236 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.322648 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.338025 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\
"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.359248 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b5050
11d4295ebc8a2c0c93fa4612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"message\\\":\\\"] [zone-nad-controller NAD controller]: shutting down\\\\nI1205 10:48:22.171818 6239 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 10:48:22.171830 6239 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 10:48:22.171844 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 10:48:22.171849 6239 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 10:48:22.171866 6239 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 10:48:22.171869 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 10:48:22.171912 6239 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:48:22.171917 6239 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 10:48:22.171975 6239 factory.go:656] Stopping watch factory\\\\nI1205 10:48:22.171990 6239 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 10:48:22.171997 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 10:48:22.172003 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 10:48:22.172009 6239 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:48:22.172110 6239 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.381478 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef
6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.397135 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.400458 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.400504 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.400515 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.400531 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.400544 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:23Z","lastTransitionTime":"2025-12-05T10:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.412018 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.425179 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-
run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.441806 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.457695 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.475737 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.490431 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.503234 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.503715 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.503831 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.503952 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.504060 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:23Z","lastTransitionTime":"2025-12-05T10:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.504657 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.521832 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.536913 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.552197 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.566881 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.590096 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.605667 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.606785 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.606834 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.606845 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.606860 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.606868 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:23Z","lastTransitionTime":"2025-12-05T10:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.625671 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.645651 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.672975 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"message\\\":\\\"] [zone-nad-controller NAD controller]: shutting down\\\\nI1205 10:48:22.171818 6239 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 10:48:22.171830 6239 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 10:48:22.171844 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 10:48:22.171849 6239 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 10:48:22.171866 6239 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 10:48:22.171869 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 10:48:22.171912 6239 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:48:22.171917 6239 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 10:48:22.171975 6239 factory.go:656] Stopping watch factory\\\\nI1205 10:48:22.171990 6239 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 10:48:22.171997 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 10:48:22.172003 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 10:48:22.172009 6239 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:48:22.172110 6239 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.709434 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.709474 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.709484 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.709500 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.709509 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:23Z","lastTransitionTime":"2025-12-05T10:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.811950 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.811983 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.811992 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.812025 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.812036 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:23Z","lastTransitionTime":"2025-12-05T10:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.914986 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.915036 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.915048 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.915065 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.915080 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:23Z","lastTransitionTime":"2025-12-05T10:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.935239 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/0.log" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.938317 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59"} Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.938803 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.946371 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt"] Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.946807 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.951099 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.951505 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.955120 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.974481 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.986792 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:23 crc kubenswrapper[5014]: I1205 10:48:23.999619 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.011663 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.017198 5014 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.017250 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.017263 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.017295 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.017309 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.025526 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.036696 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7e002dc5-a637-47bf-a201-4117a4fff39b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.036756 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7e002dc5-a637-47bf-a201-4117a4fff39b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.036790 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqwrp\" (UniqueName: \"kubernetes.io/projected/7e002dc5-a637-47bf-a201-4117a4fff39b-kube-api-access-cqwrp\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.037043 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7e002dc5-a637-47bf-a201-4117a4fff39b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.041540 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.057074 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.071625 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.090851 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e89
4a91597b8661c0b813a87f59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"message\\\":\\\"] [zone-nad-controller NAD controller]: shutting down\\\\nI1205 10:48:22.171818 6239 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 10:48:22.171830 6239 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 10:48:22.171844 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 10:48:22.171849 6239 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 10:48:22.171866 6239 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 10:48:22.171869 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 10:48:22.171912 6239 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:48:22.171917 6239 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 10:48:22.171975 6239 factory.go:656] Stopping watch factory\\\\nI1205 10:48:22.171990 6239 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 10:48:22.171997 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 10:48:22.172003 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 10:48:22.172009 6239 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:48:22.172110 6239 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.116769 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.119553 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.119668 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.119757 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.119863 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.119964 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.133223 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.137935 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7e002dc5-a637-47bf-a201-4117a4fff39b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.138118 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7e002dc5-a637-47bf-a201-4117a4fff39b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.138254 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7e002dc5-a637-47bf-a201-4117a4fff39b-ovn-control-plane-metrics-cert\") 
pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.138383 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqwrp\" (UniqueName: \"kubernetes.io/projected/7e002dc5-a637-47bf-a201-4117a4fff39b-kube-api-access-cqwrp\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.139041 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7e002dc5-a637-47bf-a201-4117a4fff39b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.139249 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7e002dc5-a637-47bf-a201-4117a4fff39b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.145321 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7e002dc5-a637-47bf-a201-4117a4fff39b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.157889 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqwrp\" (UniqueName: \"kubernetes.io/projected/7e002dc5-a637-47bf-a201-4117a4fff39b-kube-api-access-cqwrp\") pod \"ovnkube-control-plane-749d76644c-w4trt\" (UID: \"7e002dc5-a637-47bf-a201-4117a4fff39b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.160987 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.178243 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.191755 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.202890 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.213805 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.222661 5014 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.222693 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.222716 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.222732 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.222744 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.227157 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.241101 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.249343 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.249399 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.249408 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.249425 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.249436 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.255690 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.259641 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" Dec 05 10:48:24 crc kubenswrapper[5014]: E1205 10:48:24.264771 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.268238 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.268989 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.269033 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.269046 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.269064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.269077 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: W1205 10:48:24.278392 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e002dc5_a637_47bf_a201_4117a4fff39b.slice/crio-5cb25382f94a2382ee767edc8880b27ab9da51e1877135857c1e338b35d7bfbd WatchSource:0}: Error finding container 5cb25382f94a2382ee767edc8880b27ab9da51e1877135857c1e338b35d7bfbd: Status 404 returned error can't find the container with id 5cb25382f94a2382ee767edc8880b27ab9da51e1877135857c1e338b35d7bfbd Dec 05 10:48:24 crc kubenswrapper[5014]: E1205 10:48:24.286154 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.292461 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.292519 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.292533 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.292554 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.292567 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.298076 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e89
4a91597b8661c0b813a87f59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"message\\\":\\\"] [zone-nad-controller NAD controller]: shutting down\\\\nI1205 10:48:22.171818 6239 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 10:48:22.171830 6239 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 10:48:22.171844 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 10:48:22.171849 6239 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 10:48:22.171866 6239 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 10:48:22.171869 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 10:48:22.171912 6239 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:48:22.171917 6239 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 10:48:22.171975 6239 factory.go:656] Stopping watch factory\\\\nI1205 10:48:22.171990 6239 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 10:48:22.171997 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 10:48:22.172003 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 10:48:22.172009 6239 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:48:22.172110 6239 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: E1205 10:48:24.307717 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.311822 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.311872 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.311883 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.311903 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.311915 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.319785 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: E1205 10:48:24.322332 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.325815 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.325847 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.325860 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.325876 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.325889 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.333391 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: E1205 10:48:24.337032 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: E1205 10:48:24.337150 5014 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.339193 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.339229 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.339239 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.339255 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.339279 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.346669 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"nam
e\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.360175 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exi
tCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.373689 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.386146 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.398827 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.411745 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.422227 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.443595 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.443635 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.443677 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.443695 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.443706 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.545460 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.545502 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.545514 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.545530 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.545544 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.647946 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.647984 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.647994 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.648010 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.648023 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.750524 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.750575 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.750592 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.750610 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.750623 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.853000 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.853059 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.853073 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.853092 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.853103 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.951238 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" event={"ID":"7e002dc5-a637-47bf-a201-4117a4fff39b","Type":"ContainerStarted","Data":"5cb25382f94a2382ee767edc8880b27ab9da51e1877135857c1e338b35d7bfbd"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.953538 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/1.log" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.954464 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/0.log" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.954996 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.955023 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.955031 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.955047 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.955057 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:24Z","lastTransitionTime":"2025-12-05T10:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.957707 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59" exitCode=1 Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.957755 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59"} Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.958118 5014 scope.go:117] "RemoveContainer" containerID="7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.958494 5014 scope.go:117] "RemoveContainer" containerID="ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59" Dec 05 10:48:24 crc kubenswrapper[5014]: E1205 10:48:24.958717 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" Dec 05 10:48:24 crc kubenswrapper[5014]: I1205 10:48:24.989846 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b
7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.002883 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.022128 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.036363 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.056779 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"message\\\":\\\"] [zone-nad-controller NAD controller]: shutting down\\\\nI1205 10:48:22.171818 6239 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 10:48:22.171830 6239 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 10:48:22.171844 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 10:48:22.171849 6239 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 10:48:22.171866 6239 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 10:48:22.171869 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 10:48:22.171912 6239 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:48:22.171917 6239 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 10:48:22.171975 6239 factory.go:656] Stopping watch factory\\\\nI1205 10:48:22.171990 6239 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 10:48:22.171997 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 10:48:22.172003 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 10:48:22.172009 6239 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:48:22.172110 6239 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin 
network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.059913 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.059954 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.059966 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.059985 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.059996 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.070356 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.085090 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.106134 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.121516 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.134929 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.153929 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\
\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.162835 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.162873 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.162884 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.162901 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.162914 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.169922 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.186388 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.203223 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.217986 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.230736 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.265118 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.265158 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.265168 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.265183 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.265193 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.317335 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.317413 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.317470 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:25 crc kubenswrapper[5014]: E1205 10:48:25.317572 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:25 crc kubenswrapper[5014]: E1205 10:48:25.317683 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:25 crc kubenswrapper[5014]: E1205 10:48:25.317762 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.368483 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.368519 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.368532 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.368548 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.368560 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.471210 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.471247 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.471259 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.471292 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.471307 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.574701 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.574748 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.574760 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.574779 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.574792 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.677411 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.677469 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.677486 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.677502 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.677514 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.770159 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-vrt2x"] Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.770770 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:25 crc kubenswrapper[5014]: E1205 10:48:25.770856 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.779707 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.779754 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.779767 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.779782 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.779792 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.786069 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.799344 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.812889 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.827631 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.843541 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.857064 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvsr9\" (UniqueName: \"kubernetes.io/projected/97abc013-62da-459c-b7ec-2a78304dcc56-kube-api-access-zvsr9\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.857112 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.863326 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.875701 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z"
Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.883434 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.883482 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.883493 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.883509 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.883521 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.890328 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"k
ube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.903039 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.914068 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.933307 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.945667 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.957648 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvsr9\" (UniqueName: \"kubernetes.io/projected/97abc013-62da-459c-b7ec-2a78304dcc56-kube-api-access-zvsr9\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.957693 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:25 crc kubenswrapper[5014]: E1205 10:48:25.957839 5014 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:25 crc kubenswrapper[5014]: E1205 10:48:25.957906 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs podName:97abc013-62da-459c-b7ec-2a78304dcc56 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:26.45788509 +0000 UTC m=+33.406002794 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs") pod "network-metrics-daemon-vrt2x" (UID: "97abc013-62da-459c-b7ec-2a78304dcc56") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.958481 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.964035 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" event={"ID":"7e002dc5-a637-47bf-a201-4117a4fff39b","Type":"ContainerStarted","Data":"d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.964071 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" event={"ID":"7e002dc5-a637-47bf-a201-4117a4fff39b","Type":"ContainerStarted","Data":"06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.965762 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/1.log" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.969598 5014 scope.go:117] "RemoveContainer" containerID="ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59" Dec 05 10:48:25 crc kubenswrapper[5014]: E1205 10:48:25.969825 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.976149 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.981495 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvsr9\" (UniqueName: \"kubernetes.io/projected/97abc013-62da-459c-b7ec-2a78304dcc56-kube-api-access-zvsr9\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " 
pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.985549 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.985581 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.985593 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.985611 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.985624 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:25Z","lastTransitionTime":"2025-12-05T10:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:25 crc kubenswrapper[5014]: I1205 10:48:25.998205 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e89
4a91597b8661c0b813a87f59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b021f534ac8a0bc2c6fc47500855bca438b505011d4295ebc8a2c0c93fa4612\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"message\\\":\\\"] [zone-nad-controller NAD controller]: shutting down\\\\nI1205 10:48:22.171818 6239 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 10:48:22.171830 6239 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 10:48:22.171844 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 10:48:22.171849 6239 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 10:48:22.171866 6239 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 10:48:22.171869 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 10:48:22.171912 6239 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:48:22.171917 6239 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 10:48:22.171975 6239 factory.go:656] Stopping watch factory\\\\nI1205 10:48:22.171990 6239 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 10:48:22.171997 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 10:48:22.172003 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 10:48:22.172009 6239 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:48:22.172110 6239 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Di
sabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:25Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.010829 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.023682 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.044470 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8e
e7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"et
cd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.058023 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.071642 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.087647 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.090088 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.090147 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.090164 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.090187 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.090203 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:26Z","lastTransitionTime":"2025-12-05T10:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.109149 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e89
4a91597b8661c0b813a87f59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.122688 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.134771 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.149138 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.164387 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.175316 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.185612 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.192952 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.192999 5014 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.193011 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.193030 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.193041 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:26Z","lastTransitionTime":"2025-12-05T10:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.198588 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.215118 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.229676 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.242481 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.255419 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.285244 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:26Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.295213 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.295298 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.295313 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.295336 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.295349 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:26Z","lastTransitionTime":"2025-12-05T10:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.397546 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.397589 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.397600 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.397619 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.397632 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:26Z","lastTransitionTime":"2025-12-05T10:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.462898 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:26 crc kubenswrapper[5014]: E1205 10:48:26.463030 5014 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:26 crc kubenswrapper[5014]: E1205 10:48:26.463759 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs podName:97abc013-62da-459c-b7ec-2a78304dcc56 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:27.463743937 +0000 UTC m=+34.411861641 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs") pod "network-metrics-daemon-vrt2x" (UID: "97abc013-62da-459c-b7ec-2a78304dcc56") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.500053 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.500096 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.500105 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.500119 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.500130 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:26Z","lastTransitionTime":"2025-12-05T10:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.602685 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.602723 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.602731 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.602745 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.602754 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:26Z","lastTransitionTime":"2025-12-05T10:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.704911 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.704949 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.704958 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.704973 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.704985 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:26Z","lastTransitionTime":"2025-12-05T10:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.808593 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.808664 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.808682 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.808710 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.808732 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:26Z","lastTransitionTime":"2025-12-05T10:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.912516 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.912600 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.912619 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.912653 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:26 crc kubenswrapper[5014]: I1205 10:48:26.912673 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:26Z","lastTransitionTime":"2025-12-05T10:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.015206 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.015257 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.015292 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.015312 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.015323 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.119068 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.119114 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.119126 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.119146 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.119160 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.170679 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.170943 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:48:43.170904725 +0000 UTC m=+50.119022439 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.171341 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.171479 5014 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.171563 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:43.17155078 +0000 UTC m=+50.119668494 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.171739 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.171916 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.171937 5014 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.172188 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:43.172167674 +0000 UTC m=+50.120285388 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.172045 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.172672 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.172798 5014 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.172956 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:43.172941913 +0000 UTC m=+50.121059627 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.221826 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.221869 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.221878 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.221899 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.221910 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.272717 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.272951 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.272993 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.273005 5014 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.273071 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:43.273053085 +0000 UTC m=+50.221170789 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.317534 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.317679 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.318150 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.318220 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.318306 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.318367 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.318430 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.318505 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.324063 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.324093 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.324102 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.324113 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.324125 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.427062 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.427383 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.427395 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.427644 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.427655 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.474704 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.474862 5014 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: E1205 10:48:27.474935 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs podName:97abc013-62da-459c-b7ec-2a78304dcc56 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:29.474913559 +0000 UTC m=+36.423031263 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs") pod "network-metrics-daemon-vrt2x" (UID: "97abc013-62da-459c-b7ec-2a78304dcc56") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.529996 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.530045 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.530063 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.530085 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.530099 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.632571 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.632633 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.632646 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.632665 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.632679 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.734809 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.734854 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.734865 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.734884 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.734897 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.838675 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.838731 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.838744 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.838763 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.838776 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.941759 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.941829 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.941848 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.941873 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:27 crc kubenswrapper[5014]: I1205 10:48:27.941899 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:27Z","lastTransitionTime":"2025-12-05T10:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.044826 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.044895 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.044910 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.044942 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.044959 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.148450 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.148586 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.148965 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.149352 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.149708 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.252099 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.252139 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.252149 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.252163 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.252174 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.355069 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.355134 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.355153 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.355175 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.355191 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.458529 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.458591 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.458602 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.458621 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.458633 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.561967 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.562033 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.562045 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.562064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.562075 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.664831 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.664903 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.664913 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.664937 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.664947 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.767562 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.767642 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.767661 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.767691 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.767715 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.871918 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.871986 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.872003 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.872028 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.872046 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.934978 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.949345 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:28Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.963371 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:28Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.974724 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:28Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.975134 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.975178 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.975188 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.975205 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.975216 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:28Z","lastTransitionTime":"2025-12-05T10:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:28 crc kubenswrapper[5014]: I1205 10:48:28.989164 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:28Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.003602 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.015997 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.026954 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.039559 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.050656 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.061760 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.077544 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.077588 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.077601 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.077620 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.077633 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:29Z","lastTransitionTime":"2025-12-05T10:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.082052 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.095935 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.107893 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.120916 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.149008 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run
-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 
default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\
",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.164391 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.179325 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:29Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.181127 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.181166 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.181177 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.181193 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.181206 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:29Z","lastTransitionTime":"2025-12-05T10:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.283781 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.283836 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.283848 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.283865 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.283879 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:29Z","lastTransitionTime":"2025-12-05T10:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.317961 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.318165 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:29 crc kubenswrapper[5014]: E1205 10:48:29.318304 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.318361 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.318571 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:29 crc kubenswrapper[5014]: E1205 10:48:29.318674 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:29 crc kubenswrapper[5014]: E1205 10:48:29.318581 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:29 crc kubenswrapper[5014]: E1205 10:48:29.318934 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.386769 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.386810 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.386821 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.386837 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.386846 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:29Z","lastTransitionTime":"2025-12-05T10:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.490790 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.491181 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.491354 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.491500 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.491620 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:29Z","lastTransitionTime":"2025-12-05T10:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.499220 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:29 crc kubenswrapper[5014]: E1205 10:48:29.499408 5014 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:29 crc kubenswrapper[5014]: E1205 10:48:29.499467 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs podName:97abc013-62da-459c-b7ec-2a78304dcc56 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:33.499451274 +0000 UTC m=+40.447568978 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs") pod "network-metrics-daemon-vrt2x" (UID: "97abc013-62da-459c-b7ec-2a78304dcc56") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.595028 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.595075 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.595084 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.595102 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.595112 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:29Z","lastTransitionTime":"2025-12-05T10:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.698351 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.698687 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.698802 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.698868 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:29 crc kubenswrapper[5014]: I1205 10:48:29.698942 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:29Z","lastTransitionTime":"2025-12-05T10:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 10:48:31 crc kubenswrapper[5014]: I1205 10:48:31.318126 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 10:48:31 crc kubenswrapper[5014]: I1205 10:48:31.318178 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:48:31 crc kubenswrapper[5014]: I1205 10:48:31.318130 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 10:48:31 crc kubenswrapper[5014]: E1205 10:48:31.318303 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 10:48:31 crc kubenswrapper[5014]: I1205 10:48:31.318126 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 10:48:31 crc kubenswrapper[5014]: E1205 10:48:31.318462 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56"
Dec 05 10:48:31 crc kubenswrapper[5014]: E1205 10:48:31.318546 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 10:48:31 crc kubenswrapper[5014]: E1205 10:48:31.318600 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.317360 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.317360 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.317370 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 10:48:33 crc kubenswrapper[5014]: E1205 10:48:33.317477 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.317545 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:48:33 crc kubenswrapper[5014]: E1205 10:48:33.317695 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 10:48:33 crc kubenswrapper[5014]: E1205 10:48:33.317797 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 10:48:33 crc kubenswrapper[5014]: E1205 10:48:33.317879 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.341119 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c
61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.356476 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.371126 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.388667 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.408377 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.408419 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.408431 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.408450 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.408461 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:33Z","lastTransitionTime":"2025-12-05T10:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.408663 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e89
4a91597b8661c0b813a87f59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.421742 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.434239 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.448652 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.466646 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.479633 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.493612 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.506442 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.511662 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.511720 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.511735 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.511761 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.511775 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:33Z","lastTransitionTime":"2025-12-05T10:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.522822 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] 
pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.541368 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z"
Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.544557 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:48:33 crc kubenswrapper[5014]: E1205 10:48:33.544952 5014 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 10:48:33 crc kubenswrapper[5014]: E1205 10:48:33.545070 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs podName:97abc013-62da-459c-b7ec-2a78304dcc56 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:41.545045742 +0000 UTC m=+48.493163436 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs") pod "network-metrics-daemon-vrt2x" (UID: "97abc013-62da-459c-b7ec-2a78304dcc56") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.556184 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.572040 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.586807 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:33Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.615078 5014 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.615220 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.615234 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.615253 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.615265 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:33Z","lastTransitionTime":"2025-12-05T10:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.718511 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.718583 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.718594 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.718615 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.718631 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:33Z","lastTransitionTime":"2025-12-05T10:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.821747 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.821810 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.821821 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.821841 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.821854 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:33Z","lastTransitionTime":"2025-12-05T10:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.924755 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.924802 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.924811 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.924828 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:33 crc kubenswrapper[5014]: I1205 10:48:33.924838 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:33Z","lastTransitionTime":"2025-12-05T10:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.027478 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.027541 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.027554 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.027578 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.027592 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.131187 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.131254 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.131293 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.131322 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.131340 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.235554 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.235956 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.236083 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.236298 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.236398 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.339638 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.340486 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.340557 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.340628 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.340714 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.445611 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.445658 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.445668 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.445684 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.445718 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.503632 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.503709 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.503719 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.503739 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.503758 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: E1205 10:48:34.517412 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
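[Editor's note] The five-line cycle above (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats roughly every 100 ms for as long as the CNI configuration is missing. When triaging a log like this it can help to collapse the repetition into distinct messages with counts; a rough sketch, assuming the log has been saved locally as kubelet.log (the path and both patterns are illustrative):

    import re
    from collections import Counter

    # Drop the varying klog prefix (timestamp, pid) but keep the source
    # location, and blank out timestamps embedded in messages, so
    # otherwise-identical entries fall into one bucket.
    prefix = re.compile(r'^.*?kubenswrapper\[\d+\]: [IWEF]\d{4} [\d:.]+\s+\d+ ')
    stamp = re.compile(r'\d{4}-\d{2}-\d{2}[T ][\d:.]+(Z| \+0000 UTC)?')

    counts = Counter()
    with open("kubelet.log") as log:
        for line in log:
            counts[stamp.sub("<ts>", prefix.sub("", line.strip()))] += 1

    for message, n in counts.most_common(10):
        print(f"{n:6d}  {message[:120]}")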
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:34Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.522465 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.522734 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
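[Editor's note] The payload quoted in each "failed to patch status" error is the JSON merge patch itself, escaped twice on its way into the log (once when it was embedded in the error string, once more by the klog formatter). A small sketch for recovering it as an object so the conditions can be inspected directly; the file name is illustrative, and the double-unescape assumption matches the quoting seen in these lines:

    import json
    import re

    text = open("kubelet.log").read()
    # Assumes each record sits on a single line, as in this log.
    m = re.search(r'failed to patch status \\"(\{.*?\})\\" for ', text)
    if m:
        payload = m.group(1)
        for _ in range(2):  # peel both escaping layers
            payload = payload.encode("ascii").decode("unicode_escape")
        patch = json.loads(payload)
        print(json.dumps(patch.get("status", {}).get("conditions", []), indent=2))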
event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.522851 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.522948 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.523071 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: E1205 10:48:34.543506 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:34Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.549350 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.549414 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
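[Editor's note] Entries like "Error updating node status, will retry" reflect the kubelet's bounded retry: each sync it attempts the status patch a fixed number of times before giving up until the next cycle, with separate, longer backoff for failures such as the volume mount above ("durationBeforeRetry 8s"). A toy sketch of that shape, with purely illustrative names and limits, where the stub failure mirrors the TLS error in this log:

    import time

    def patch_status():
        # Stub standing in for the real PATCH call; mirrors this log's failure.
        raise RuntimeError("x509: certificate has expired or is not yet valid")

    def try_update_status(attempts=5, delay=0.5):
        """Attempt a status patch a fixed number of times per sync period."""
        for i in range(1, attempts + 1):
            try:
                patch_status()
                return True
            except RuntimeError as err:
                print(f"attempt {i}/{attempts} failed: {err}; will retry")
                time.sleep(delay)
        return False  # give up until the next sync period

    try_update_status()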
event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.549427 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.549451 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.549468 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: E1205 10:48:34.563348 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:34Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.568989 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.569252 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.569410 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.569558 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.569669 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: E1205 10:48:34.587413 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:34Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.592947 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.593035 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.593079 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.593101 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.593119 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: E1205 10:48:34.608690 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:34Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:34 crc kubenswrapper[5014]: E1205 10:48:34.608869 5014 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.610522 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.610565 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.610575 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.610591 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.610602 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.713857 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.713918 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.713931 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.713950 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.713961 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.817347 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.817405 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.817418 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.817439 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.817453 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.920407 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.920712 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.920914 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.920985 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:34 crc kubenswrapper[5014]: I1205 10:48:34.921059 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:34Z","lastTransitionTime":"2025-12-05T10:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.023403 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.023462 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.023471 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.023487 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.023496 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:35Z","lastTransitionTime":"2025-12-05T10:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.126746 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.126820 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.126834 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.126855 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.126870 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:35Z","lastTransitionTime":"2025-12-05T10:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.231219 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.231775 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.231952 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.232099 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.232232 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:35Z","lastTransitionTime":"2025-12-05T10:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.318205 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.318366 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:35 crc kubenswrapper[5014]: E1205 10:48:35.318416 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.318366 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:35 crc kubenswrapper[5014]: E1205 10:48:35.318597 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.318693 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:35 crc kubenswrapper[5014]: E1205 10:48:35.318878 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:35 crc kubenswrapper[5014]: E1205 10:48:35.319025 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.337728 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.337833 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.337848 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.337888 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.337904 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:35Z","lastTransitionTime":"2025-12-05T10:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.441917 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.441976 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.441989 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.442013 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.442027 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:35Z","lastTransitionTime":"2025-12-05T10:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.545840 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.545918 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.545937 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.546010 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.546029 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:35Z","lastTransitionTime":"2025-12-05T10:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.649594 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.649686 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.649728 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.649751 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.649766 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:35Z","lastTransitionTime":"2025-12-05T10:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.752833 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.752929 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.752953 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.752993 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.753016 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:35Z","lastTransitionTime":"2025-12-05T10:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.854861 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.854905 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.854919 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.854934 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:35 crc kubenswrapper[5014]: I1205 10:48:35.854946 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:35Z","lastTransitionTime":"2025-12-05T10:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the same five-entry status block repeats at roughly 100 ms intervals through Dec 05 10:48:37.308, differing only in timestamps]
Dec 05 10:48:37 crc kubenswrapper[5014]: I1205 10:48:37.317762 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 10:48:37 crc kubenswrapper[5014]: I1205 10:48:37.317834 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:48:37 crc kubenswrapper[5014]: E1205 10:48:37.317907 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 10:48:37 crc kubenswrapper[5014]: I1205 10:48:37.317948 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 10:48:37 crc kubenswrapper[5014]: E1205 10:48:37.318001 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56"
Dec 05 10:48:37 crc kubenswrapper[5014]: I1205 10:48:37.318021 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 10:48:37 crc kubenswrapper[5014]: E1205 10:48:37.318103 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 10:48:37 crc kubenswrapper[5014]: E1205 10:48:37.318339 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[the node status block repeats at roughly 100 ms intervals through Dec 05 10:48:39.277]
Dec 05 10:48:39 crc kubenswrapper[5014]: I1205 10:48:39.317682 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 10:48:39 crc kubenswrapper[5014]: I1205 10:48:39.317777 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 10:48:39 crc kubenswrapper[5014]: I1205 10:48:39.317799 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 10:48:39 crc kubenswrapper[5014]: E1205 10:48:39.317941 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 10:48:39 crc kubenswrapper[5014]: I1205 10:48:39.317971 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:48:39 crc kubenswrapper[5014]: E1205 10:48:39.318153 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 10:48:39 crc kubenswrapper[5014]: E1205 10:48:39.318361 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56"
Dec 05 10:48:39 crc kubenswrapper[5014]: E1205 10:48:39.318530 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[the node status block repeats at roughly 100 ms intervals through Dec 05 10:48:41.245]
Has your network provider started?"} Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.317860 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.317927 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.317972 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:41 crc kubenswrapper[5014]: E1205 10:48:41.318046 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.318064 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:41 crc kubenswrapper[5014]: E1205 10:48:41.318238 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:41 crc kubenswrapper[5014]: E1205 10:48:41.318329 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:41 crc kubenswrapper[5014]: E1205 10:48:41.318723 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.319324 5014 scope.go:117] "RemoveContainer" containerID="ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.354470 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.354543 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.354558 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.354583 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.354604 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:41Z","lastTransitionTime":"2025-12-05T10:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.456747 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.457171 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.457185 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.457202 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.457215 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:41Z","lastTransitionTime":"2025-12-05T10:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.560283 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.560342 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.560353 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.560370 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.560379 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:41Z","lastTransitionTime":"2025-12-05T10:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.640121 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:41 crc kubenswrapper[5014]: E1205 10:48:41.640333 5014 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:41 crc kubenswrapper[5014]: E1205 10:48:41.640447 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs podName:97abc013-62da-459c-b7ec-2a78304dcc56 nodeName:}" failed. No retries permitted until 2025-12-05 10:48:57.640423356 +0000 UTC m=+64.588541060 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs") pod "network-metrics-daemon-vrt2x" (UID: "97abc013-62da-459c-b7ec-2a78304dcc56") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.663222 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.663264 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.663290 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.663307 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.663318 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:41Z","lastTransitionTime":"2025-12-05T10:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.765495 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.765538 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.765548 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.765566 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.765578 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:41Z","lastTransitionTime":"2025-12-05T10:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.868115 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.868228 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.868246 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.868261 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.868353 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:41Z","lastTransitionTime":"2025-12-05T10:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.971593 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.971654 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.971670 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.971688 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:41 crc kubenswrapper[5014]: I1205 10:48:41.971703 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:41Z","lastTransitionTime":"2025-12-05T10:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.028178 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/1.log" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.030082 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.030742 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.042869 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 
10:48:42.066701 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.074714 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.074765 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.074777 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.074795 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.074810 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:42Z","lastTransitionTime":"2025-12-05T10:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.092653 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving 
securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.111756 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.124470 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.149343 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.163132 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.179535 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.179573 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.179584 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.179599 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.179609 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:42Z","lastTransitionTime":"2025-12-05T10:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.197785 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.213735 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.230443 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.248044 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.269149 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.282812 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.282857 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.282868 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.282887 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.283092 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:42Z","lastTransitionTime":"2025-12-05T10:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.287294 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.301528 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.315318 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.328667 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.342644 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.386933 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.386990 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.387002 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.387025 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.387038 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:42Z","lastTransitionTime":"2025-12-05T10:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.489757 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.489798 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.489807 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.489825 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.489834 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:42Z","lastTransitionTime":"2025-12-05T10:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.592185 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.592262 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.592289 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.592314 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.592332 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:42Z","lastTransitionTime":"2025-12-05T10:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.697003 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.697089 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.697100 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.697124 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.697141 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:42Z","lastTransitionTime":"2025-12-05T10:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.799844 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.799893 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.799903 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.799923 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.799933 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:42Z","lastTransitionTime":"2025-12-05T10:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.902451 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.902517 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.902533 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.902555 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:42 crc kubenswrapper[5014]: I1205 10:48:42.902568 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:42Z","lastTransitionTime":"2025-12-05T10:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.004902 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.004986 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.004996 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.005019 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.005034 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.109194 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.109251 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.109261 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.109302 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.109314 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.212457 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.212525 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.212543 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.212566 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.212581 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.260235 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.260472 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.260509 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:49:15.260470716 +0000 UTC m=+82.208588420 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.260573 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.260614 5014 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.260653 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.260696 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:49:15.26067119 +0000 UTC m=+82.208788994 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.260777 5014 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.260820 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.260836 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.260848 5014 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.260853 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:49:15.260841634 +0000 UTC m=+82.208959458 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.260874 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 10:49:15.260867675 +0000 UTC m=+82.208985379 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.316124 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.316221 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.316245 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.316318 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.316358 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.317221 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.317359 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.317476 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.317512 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.317561 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.317636 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.317708 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.317783 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.335324 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.357159 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.361397 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.361808 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.361882 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.361910 5014 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:43 crc kubenswrapper[5014]: E1205 10:48:43.362029 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 10:49:15.361993631 +0000 UTC m=+82.310111375 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.371949 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd9
0d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.391960 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.410615 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.419660 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.419709 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.419720 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.419740 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.419752 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.422964 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.434606 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.459817 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.477795 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.495179 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.513946 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.522746 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.522788 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.522807 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.522827 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.522840 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.564228 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fe
f2694dbef9d9d2c86415cbad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.592424 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.611676 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.625059 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.625115 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.625129 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.625154 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.625169 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.627040 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.643166 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.653259 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T10:48:43Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.728609 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.729361 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.729487 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.729555 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.729622 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.833947 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.834007 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.834021 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.834045 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.834059 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.936938 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.937296 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.937395 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.937467 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:43 crc kubenswrapper[5014]: I1205 10:48:43.937586 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:43Z","lastTransitionTime":"2025-12-05T10:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.040014 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.040090 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.040105 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.040124 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.040161 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.041131 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/2.log" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.042049 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/1.log" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.045588 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad" exitCode=1 Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.045676 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.045759 5014 scope.go:117] "RemoveContainer" containerID="ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.046716 5014 scope.go:117] "RemoveContainer" containerID="6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad" Dec 05 10:48:44 crc kubenswrapper[5014]: E1205 10:48:44.047054 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.067097 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.079310 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.091616 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.102296 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.117391 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.138983 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.143504 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.143559 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.143570 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.143592 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.143604 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.154555 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.173552 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.188317 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.201216 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.213856 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.226450 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.238105 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.246420 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.246472 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.246501 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.246519 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.246534 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.249109 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.262000 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.283427 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"i
mage\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.307771 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.349406 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.349467 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.349487 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.349510 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.349522 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.451896 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.451949 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.451960 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.451982 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.451992 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.554573 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.554637 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.554653 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.554677 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.554693 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.656970 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.657046 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.657064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.657087 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.657104 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.760567 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.760631 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.760651 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.760670 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.760690 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.836173 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.836228 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.836255 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.836308 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.836327 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: E1205 10:48:44.851530 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 
2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.855957 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.856021 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.856034 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.856060 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.856075 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: E1205 10:48:44.873546 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 
2025-08-24T17:21:41Z" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.877420 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.877472 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.877487 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.877507 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.877519 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:44 crc kubenswrapper[5014]: E1205 10:48:44.894347 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 
2025-08-24T17:21:41Z"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.898433 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.898481 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.898497 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.898521 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.898537 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:44 crc kubenswrapper[5014]: E1205 10:48:44.916885 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status patch payload byte-identical to the previous attempt above; elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.922095 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.922136 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.922157 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.922184 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.922206 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
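The retry loop here fails with the same x509 error each time, and the message itself carries both the node's clock ("current time") and the certificate's notAfter ("is after"). A minimal Python 3 sketch, not part of the captured log, with the two timestamps copied from the error above, to quantify the skew:

    # How stale was the node.network-node-identity.openshift.io serving cert
    # when these retries ran? Timestamps copied from the kubelet error above.
    from datetime import datetime, timezone

    current = datetime(2025, 12, 5, 10, 48, 44, tzinfo=timezone.utc)    # "current time" in the error
    not_after = datetime(2025, 8, 24, 17, 21, 41, tzinfo=timezone.utc)  # "is after" (cert notAfter)

    delta = current - not_after
    print(f"certificate expired {delta.days} days, {delta.seconds // 3600} hours ago")  # 102 days, 17 hours

Any positive delta means every webhook POST to https://127.0.0.1:9743/node is rejected at the TLS layer, so the kubelet can never persist its status patch, as the remaining retries below confirm.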
Dec 05 10:48:44 crc kubenswrapper[5014]: E1205 10:48:44.938463 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status patch payload byte-identical to the previous attempts above; elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:44Z is after 2025-08-24T17:21:41Z"
Dec 05 10:48:44 crc kubenswrapper[5014]: E1205 10:48:44.938598 5014 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.940673 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.940720 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.940729 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.940746 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:44 crc kubenswrapper[5014]: I1205 10:48:44.940757 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:44Z","lastTransitionTime":"2025-12-05T10:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.043227 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.043354 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.043393 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.043425 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.043447 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
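With the retry budget exhausted, the node stays NotReady until the webhook's serving certificate is replaced. A minimal sketch, not part of the captured log, for pulling the certificate off the listener named in the error; it assumes Python 3 with the third-party cryptography package available on the node, and if the endpoint insists on a client certificate the handshake may abort before the peer certificate is readable, in which case an openssl s_client probe is the usual fallback:

    # Fetch the serving certificate from the webhook endpoint named in the
    # error (https://127.0.0.1:9743/node) and print its validity window.
    import socket
    import ssl
    from cryptography import x509  # assumption: package installed on the node

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False       # must be disabled before CERT_NONE
    ctx.verify_mode = ssl.CERT_NONE  # deliberately skip verification: the cert is expired

    with socket.create_connection(("127.0.0.1", 9743), timeout=5) as raw:
        with ctx.wrap_socket(raw) as tls:
            der = tls.getpeercert(binary_form=True)  # DER bytes of the server cert

    cert = x509.load_der_x509_certificate(der)
    print("subject:  ", cert.subject.rfc4514_string())
    print("notBefore:", cert.not_valid_before)
    print("notAfter: ", cert.not_valid_after)  # expect 2025-08-24 17:21:41 per the log

A CRC guest resumed long after its internal certificates' rotation window commonly comes back in exactly this state, with the control plane unable to rotate them until the webhook path is healthy again.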
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.059497 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/2.log"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.145905 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.146064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.146094 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.146128 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.146151 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.252520 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.252576 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.252589 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.252609 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.252623 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.318308 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.318493 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.318357 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.318357 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 10:48:45 crc kubenswrapper[5014]: E1205 10:48:45.318642 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 10:48:45 crc kubenswrapper[5014]: E1205 10:48:45.318843 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 10:48:45 crc kubenswrapper[5014]: E1205 10:48:45.318997 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56"
Dec 05 10:48:45 crc kubenswrapper[5014]: E1205 10:48:45.319170 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.356123 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.356184 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.356201 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.356225 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.356239 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
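Every NotReady heartbeat and each of the four pod-sync failures above points at the same root cause: nothing has written a CNI config into /etc/kubernetes/cni/net.d/ yet. On this cluster that file is typically dropped by ovnkube-node (whose ovnkube-controller log the kubelet just finished parsing) once it is healthy. A minimal Python 3 sketch, not part of the captured log, to check the directory on the node itself:

    # Is the kubelet's complaint still true? List CNI configs in the
    # directory named by the NetworkPluginNotReady message.
    from pathlib import Path

    cni_dir = Path("/etc/kubernetes/cni/net.d")
    configs = sorted(cni_dir.glob("*.conf*")) if cni_dir.is_dir() else []  # .conf and .conflist

    if configs:
        for cfg in configs:
            print("CNI config present:", cfg)
    else:
        print(f"no CNI configuration files under {cni_dir} -- matches the kubelet error")

An empty directory here is expected while ovnkube-node is still blocked (for example by the expired webhook certificate above); the heartbeats below simply repeat until that changes.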
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.460264 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.460364 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.460382 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.460414 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.460436 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.563743 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.563812 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.563836 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.563865 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.563889 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.667949 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.668014 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.668027 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.668048 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.668063 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.771869 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.771909 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.771920 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.771938 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.771950 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.874232 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.874341 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.874356 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.874378 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.874395 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.977737 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.977828 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.977837 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.977858 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:45 crc kubenswrapper[5014]: I1205 10:48:45.977870 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:45Z","lastTransitionTime":"2025-12-05T10:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.081109 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.081158 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.081183 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.081204 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.081218 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:46Z","lastTransitionTime":"2025-12-05T10:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.184660 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.184731 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.184744 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.184765 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.184780 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:46Z","lastTransitionTime":"2025-12-05T10:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.288239 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.288311 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.288323 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.288345 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.288358 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:46Z","lastTransitionTime":"2025-12-05T10:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.392304 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.392365 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.392375 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.392393 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.392405 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:46Z","lastTransitionTime":"2025-12-05T10:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.494980 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.495112 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.495127 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.495151 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.495166 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:46Z","lastTransitionTime":"2025-12-05T10:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.600528 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.600608 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.600630 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.600662 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.600685 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:46Z","lastTransitionTime":"2025-12-05T10:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.703203 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.703252 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.703281 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.703299 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.703310 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:46Z","lastTransitionTime":"2025-12-05T10:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.806507 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.806562 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.806576 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.806593 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.806603 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:46Z","lastTransitionTime":"2025-12-05T10:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.909860 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.909947 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.909971 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.910004 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:46 crc kubenswrapper[5014]: I1205 10:48:46.910027 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:46Z","lastTransitionTime":"2025-12-05T10:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.013069 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.013121 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.013131 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.013147 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.013158 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.117374 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.117438 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.117466 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.117491 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.117506 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.220651 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.220708 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.220726 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.220763 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.220782 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.317550 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.317614 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.317677 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:47 crc kubenswrapper[5014]: E1205 10:48:47.317745 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.317789 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:47 crc kubenswrapper[5014]: E1205 10:48:47.317968 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:47 crc kubenswrapper[5014]: E1205 10:48:47.318127 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:47 crc kubenswrapper[5014]: E1205 10:48:47.318199 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.325144 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.325197 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.325219 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.325244 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.325262 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.427894 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.427956 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.427966 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.427985 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.428001 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.531143 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.531236 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.531254 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.531307 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.531324 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.634528 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.634590 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.634601 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.634628 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.634645 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.738206 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.738258 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.738331 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.738357 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.738370 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.842039 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.842102 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.842114 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.842163 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.842177 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.944849 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.944934 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.944953 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.944985 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:47 crc kubenswrapper[5014]: I1205 10:48:47.945007 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:47Z","lastTransitionTime":"2025-12-05T10:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.048187 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.048236 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.048249 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.048319 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.048334 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.151064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.151125 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.151141 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.151165 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.151181 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.254265 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.254397 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.254425 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.254462 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.254488 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.292889 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.311299 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.314366 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.332864 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.347404 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.358234 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.358319 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.358333 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.358353 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.358369 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.363610 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.385143 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.405056 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.419982 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.435532 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.446208 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.457403 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.462585 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.462632 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.462645 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.462667 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.462683 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.474903 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.489847 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.505454 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.521299 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.538558 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run
-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 
default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service 
openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.553369 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"
},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.566552 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.566626 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.566648 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.566678 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.566697 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.572398 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:48Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.670435 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.670492 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.670504 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.670529 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.670544 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.773349 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.773445 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.773468 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.773501 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.773522 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.876848 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.876982 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.876996 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.877018 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.877031 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.979665 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.979709 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.979722 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.979741 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:48 crc kubenswrapper[5014]: I1205 10:48:48.979753 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:48Z","lastTransitionTime":"2025-12-05T10:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.082194 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.082295 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.082314 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.082338 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.082358 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:49Z","lastTransitionTime":"2025-12-05T10:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.185806 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.185897 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.185931 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.185983 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.186007 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:49Z","lastTransitionTime":"2025-12-05T10:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.289839 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.289922 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.289954 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.289985 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.290027 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:49Z","lastTransitionTime":"2025-12-05T10:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.317803 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:49 crc kubenswrapper[5014]: E1205 10:48:49.318017 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.318407 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:49 crc kubenswrapper[5014]: E1205 10:48:49.318557 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.318601 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.318662 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:49 crc kubenswrapper[5014]: E1205 10:48:49.318786 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:49 crc kubenswrapper[5014]: E1205 10:48:49.318983 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.393166 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.393228 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.393248 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.393316 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.393337 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:49Z","lastTransitionTime":"2025-12-05T10:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.497728 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.497810 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.497830 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.497862 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.497884 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:49Z","lastTransitionTime":"2025-12-05T10:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.601646 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.601723 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.601745 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.601772 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.601791 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:49Z","lastTransitionTime":"2025-12-05T10:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.706189 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.706327 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.706360 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.706395 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.706424 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:49Z","lastTransitionTime":"2025-12-05T10:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.809333 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.809451 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.809477 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.809509 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.809530 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:49Z","lastTransitionTime":"2025-12-05T10:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.913126 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.913234 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.913258 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.913361 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:49 crc kubenswrapper[5014]: I1205 10:48:49.913391 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:49Z","lastTransitionTime":"2025-12-05T10:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.017111 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.017228 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.017262 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.017339 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.017367 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.121993 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.122055 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.122066 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.122089 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.122104 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.225603 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.225675 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.225688 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.225709 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.225722 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.328183 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.328252 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.328300 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.328318 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.328330 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.432477 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.432536 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.432549 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.432571 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.432584 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.535642 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.535699 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.535711 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.535731 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.535748 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.639096 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.639156 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.639174 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.639199 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.639219 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.741825 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.741862 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.741871 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.741887 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.741898 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.845064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.845124 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.845137 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.845160 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.845173 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.948568 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.948618 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.948628 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.948646 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:50 crc kubenswrapper[5014]: I1205 10:48:50.948656 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:50Z","lastTransitionTime":"2025-12-05T10:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.051837 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.051911 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.051938 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.051971 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.051994 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.155730 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.155808 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.155839 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.155874 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.155902 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.260021 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.260094 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.260107 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.260130 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.260148 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.318017 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:51 crc kubenswrapper[5014]: E1205 10:48:51.318189 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.318006 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:51 crc kubenswrapper[5014]: E1205 10:48:51.318403 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.318204 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.318006 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:51 crc kubenswrapper[5014]: E1205 10:48:51.318515 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:51 crc kubenswrapper[5014]: E1205 10:48:51.318681 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.362915 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.362953 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.362963 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.362979 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.362991 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.465600 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.465664 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.465675 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.465697 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.465709 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.569677 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.569756 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.569775 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.569809 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.569832 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.673383 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.673436 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.673446 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.673465 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.673476 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.776876 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.776957 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.776975 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.777002 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.777020 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.880094 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.880150 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.880162 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.880180 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.880192 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.983392 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.983491 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.983525 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.983559 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:51 crc kubenswrapper[5014]: I1205 10:48:51.983583 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:51Z","lastTransitionTime":"2025-12-05T10:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.086646 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.086703 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.086712 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.086731 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.086742 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:52Z","lastTransitionTime":"2025-12-05T10:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.189974 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.190037 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.190051 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.190078 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.190095 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:52Z","lastTransitionTime":"2025-12-05T10:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.293287 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.293342 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.293352 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.293373 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.293384 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:52Z","lastTransitionTime":"2025-12-05T10:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.396321 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.396397 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.396417 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.396447 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.396465 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:52Z","lastTransitionTime":"2025-12-05T10:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.499445 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.499972 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.499998 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.500035 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.500061 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:52Z","lastTransitionTime":"2025-12-05T10:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.603612 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.603698 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.603719 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.603748 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.603768 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:52Z","lastTransitionTime":"2025-12-05T10:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.707404 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.707469 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.707481 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.707501 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.707514 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:52Z","lastTransitionTime":"2025-12-05T10:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.811188 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.811246 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.811261 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.811321 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.811340 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:52Z","lastTransitionTime":"2025-12-05T10:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.915060 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.915162 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.915186 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.915218 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:52 crc kubenswrapper[5014]: I1205 10:48:52.915239 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:52Z","lastTransitionTime":"2025-12-05T10:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.019195 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.019331 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.019359 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.019392 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.019421 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.123605 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.123683 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.123700 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.123729 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.123749 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.230840 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.230931 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.230962 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.231000 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.231026 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.317391 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.317579 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.317648 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:53 crc kubenswrapper[5014]: E1205 10:48:53.317740 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.317808 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:53 crc kubenswrapper[5014]: E1205 10:48:53.318066 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:53 crc kubenswrapper[5014]: E1205 10:48:53.318242 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:53 crc kubenswrapper[5014]: E1205 10:48:53.318398 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.334921 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.335223 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.335729 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.335753 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.335780 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.335800 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.363731 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.380872 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.397106 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.414485 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.430227 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.439261 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.439325 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.439342 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.439361 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.439373 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.442052 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.454766 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.467916 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes
/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.481650 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af934
6e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] 
issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.496846 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.512068 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.530479 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed71423bbb0be2dc5e8962d0f162e48aee9c6e894a91597b8661c0b813a87f59\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:24Z\\\",\\\"message\\\":\\\"tart network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:24Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:24.106679 6435 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf after 0 failed attempt(s)\\\\nI1205 10:48:24.106652 6435 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}\\\\nI1205 10:48:24.106687 6435 default_network_controller.go:776] Recording success event on pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 10:48:24.106703 6435 obj_retry.go:420] Function \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"i
mage\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.541743 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.541784 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.541795 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.541814 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.541828 5014 setters.go:603] "Node 
became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.550155 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.562878 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.579002 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.594720 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\
"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.607910 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:53Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.643825 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.643865 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.643874 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.643889 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.643900 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.748427 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.748492 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.748502 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.748525 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.748537 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.852316 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.852374 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.852389 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.852415 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.852433 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.955839 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.955887 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.955897 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.955914 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:53 crc kubenswrapper[5014]: I1205 10:48:53.955926 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:53Z","lastTransitionTime":"2025-12-05T10:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.059677 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.059723 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.059737 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.059766 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.059780 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.161946 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.161989 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.161998 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.162014 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.162023 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.264736 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.264787 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.264799 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.264820 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.264832 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.368163 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.368222 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.368232 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.368249 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.368260 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.471565 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.471611 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.471626 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.471646 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.471661 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.573929 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.573993 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.574003 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.574023 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.574036 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.677374 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.677430 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.677443 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.677462 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.677474 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.780058 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.780119 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.780129 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.780150 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.780169 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.883205 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.883305 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.883318 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.883339 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.883352 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.986263 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.986341 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.986354 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.986399 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:54 crc kubenswrapper[5014]: I1205 10:48:54.986417 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:54Z","lastTransitionTime":"2025-12-05T10:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.089653 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.089723 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.089734 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.089755 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.089770 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.090849 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.090904 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.090919 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.090932 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.090943 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.106457 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:55Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.111310 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.111374 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.111387 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.111410 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.111424 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.126507 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:55Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.130979 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.131019 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.131032 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.131053 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.131069 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.146973 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:55Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.150990 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.151020 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.151033 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.151049 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.151060 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.166787 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:55Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.171645 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.171900 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.171916 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.171975 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.171992 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.185161 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:55Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.185290 5014 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.192461 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.192534 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.192552 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.192571 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.192585 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.295838 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.295885 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.295898 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.295915 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.295931 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.317167 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.317174 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.317417 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.317573 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.317609 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.317762 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.317928 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:55 crc kubenswrapper[5014]: E1205 10:48:55.318017 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.399922 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.400008 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.400027 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.400054 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.400081 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.503980 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.504033 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.504051 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.504072 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.504085 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.606774 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.606882 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.606905 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.606936 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.606955 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.709827 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.709903 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.709937 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.709968 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.710024 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.813231 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.813304 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.813317 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.813334 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.813346 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.915556 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.915605 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.915619 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.915637 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:55 crc kubenswrapper[5014]: I1205 10:48:55.915650 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:55Z","lastTransitionTime":"2025-12-05T10:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.019418 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.019472 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.019485 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.019503 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.019516 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.122308 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.122359 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.122373 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.122391 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.122403 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.224899 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.224958 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.224973 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.224998 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.225015 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.328739 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.328801 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.328817 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.328844 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.328863 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.432116 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.432163 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.432173 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.432191 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.432206 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.535415 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.535482 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.535500 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.535533 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.535554 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.639265 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.639355 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.639379 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.639401 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.639412 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.742585 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.742646 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.742661 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.742686 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.742702 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.845447 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.845497 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.845507 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.845525 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.845537 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.948440 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.948505 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.948526 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.948553 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:56 crc kubenswrapper[5014]: I1205 10:48:56.948572 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:56Z","lastTransitionTime":"2025-12-05T10:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.052665 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.052735 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.052751 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.052778 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.052796 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.155733 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.155801 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.155818 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.155846 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.155863 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.258348 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.258395 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.258408 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.258425 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.258437 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.317888 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.317935 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.317900 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.318170 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:57 crc kubenswrapper[5014]: E1205 10:48:57.318313 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:48:57 crc kubenswrapper[5014]: E1205 10:48:57.318367 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:48:57 crc kubenswrapper[5014]: E1205 10:48:57.318441 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.318612 5014 scope.go:117] "RemoveContainer" containerID="6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad" Dec 05 10:48:57 crc kubenswrapper[5014]: E1205 10:48:57.318591 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:48:57 crc kubenswrapper[5014]: E1205 10:48:57.318772 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.330240 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.341867 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{
\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.354391 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.361311 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.361376 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.361388 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.361406 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.361416 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.367161 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.379738 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.391115 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.402380 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.411913 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.436102 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.450186 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.464032 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.464073 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.464083 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.464101 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.464113 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.464900 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.479194 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.496470 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.508451 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.520681 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 
10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.534813 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.547148 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.557004 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:57Z is after 2025-08-24T17:21:41Z" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.567168 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.567211 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.567223 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.567247 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.567263 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.670564 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.670651 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.670676 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.670713 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.670740 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.738563 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:48:57 crc kubenswrapper[5014]: E1205 10:48:57.738828 5014 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 10:48:57 crc kubenswrapper[5014]: E1205 10:48:57.738969 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs podName:97abc013-62da-459c-b7ec-2a78304dcc56 nodeName:}" failed. No retries permitted until 2025-12-05 10:49:29.738934187 +0000 UTC m=+96.687051931 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs") pod "network-metrics-daemon-vrt2x" (UID: "97abc013-62da-459c-b7ec-2a78304dcc56") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.774474 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.774553 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.774574 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.774606 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.774627 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
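The nestedpendingoperations entry above shows the kubelet's exponential retry backoff for the failed metrics-certs mount: durationBeforeRetry is already 32s, so the mount has failed several times since boot. A small stdlib-only sketch of the doubling schedule that produces that figure; the 500ms initial delay and 2m2s cap are taken from upstream Kubernetes sources (pkg/util/goroutinemap/exponentialbackoff), not from this log, so treat them as assumptions:

# backoff_schedule.py -- hypothetical sketch of the volume-retry backoff
# behind "(durationBeforeRetry 32s)" above: start at 500ms, double per
# failure, cap at 2m2s (assumed upstream kubelet parameters).
from datetime import timedelta

initial = timedelta(milliseconds=500)
cap = timedelta(minutes=2, seconds=2)

delay = initial
for attempt in range(1, 11):
    print(f"failure {attempt:2d}: next retry in {delay}")
    delay = min(delay * 2, cap)

Under that schedule, 32s is the delay after the seventh consecutive failure (0.5s, 1s, 2s, 4s, 8s, 16s, 32s), consistent with the secret having been unavailable since the kubelet started.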
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.877996 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.878052 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.878064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.878084 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.878099 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.981642 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.981699 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.981717 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.981740 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:57 crc kubenswrapper[5014]: I1205 10:48:57.981756 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:57Z","lastTransitionTime":"2025-12-05T10:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.084807 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.084865 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.084882 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.084906 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.084924 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:58Z","lastTransitionTime":"2025-12-05T10:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.187305 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.187358 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.187371 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.187392 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.187407 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:58Z","lastTransitionTime":"2025-12-05T10:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.302014 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.302058 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.302069 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.302085 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.302096 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:58Z","lastTransitionTime":"2025-12-05T10:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.405415 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.405481 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.405493 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.405511 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.405677 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:58Z","lastTransitionTime":"2025-12-05T10:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.508495 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.508556 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.508576 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.508603 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.508624 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:58Z","lastTransitionTime":"2025-12-05T10:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.611502 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.611549 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.611560 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.611575 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.611585 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:58Z","lastTransitionTime":"2025-12-05T10:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.714578 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.714610 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.714618 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.714632 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.714640 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:58Z","lastTransitionTime":"2025-12-05T10:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.817865 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.817914 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.817924 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.817942 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.817956 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:58Z","lastTransitionTime":"2025-12-05T10:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.920710 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.920751 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.920762 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.920777 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:58 crc kubenswrapper[5014]: I1205 10:48:58.920793 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:58Z","lastTransitionTime":"2025-12-05T10:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.023570 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.023679 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.023697 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.023725 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.023747 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.126507 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.126553 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.126565 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.126583 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.126596 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.229574 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.229652 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.229664 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.229689 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.229702 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.318063 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.318133 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.318142 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.318144 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:48:59 crc kubenswrapper[5014]: E1205 10:48:59.318259 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 10:48:59 crc kubenswrapper[5014]: E1205 10:48:59.318368 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56"
Dec 05 10:48:59 crc kubenswrapper[5014]: E1205 10:48:59.318456 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 10:48:59 crc kubenswrapper[5014]: E1205 10:48:59.318568 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.332011 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.332054 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.332067 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.332086 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.332098 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
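The four "Error syncing pod, skipping" entries above and the recurring NodeNotReady condition all reduce to the same check: the container runtime found no CNI network configuration. A trivial sketch (hypothetical script; the directory path is quoted verbatim from the message) that reproduces the kubelet's complaint:

# cni_check.py -- hypothetical check mirroring the NetworkReady condition.
# The runtime reports NetworkPluginNotReady until a CNI network config
# appears in this directory; on this node it is written by the multus /
# OVN-Kubernetes pods once they come up.
from pathlib import Path

CNI_DIR = Path("/etc/kubernetes/cni/net.d")  # path from the log message

configs = []
if CNI_DIR.is_dir():
    # CNI loaders accept .conf, .conflist and .json network definitions.
    configs = sorted(p for p in CNI_DIR.iterdir()
                     if p.suffix in {".conf", ".conflist", ".json"})

if configs:
    for p in configs:
        print("found CNI config:", p)
else:
    print(f"no CNI configuration file in {CNI_DIR}/ -- matches the KubeletNotReady reason above")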
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.436808 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.436944 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.436969 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.437005 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.437029 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.541905 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.541946 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.541956 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.541972 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.541983 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.644960 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.645009 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.645022 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.645046 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.645067 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.747813 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.747889 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.747906 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.747928 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.747943 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.850799 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.850858 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.850868 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.850889 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.850900 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.953618 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.953692 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.953709 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.953729 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:48:59 crc kubenswrapper[5014]: I1205 10:48:59.953741 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:48:59Z","lastTransitionTime":"2025-12-05T10:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.056755 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.056812 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.056823 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.056845 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.056862 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.160621 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.160680 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.160691 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.160709 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.160721 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.263866 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.263908 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.263920 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.263936 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.263945 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.367562 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.367636 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.367650 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.367672 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.367683 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.470777 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.470816 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.470826 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.470843 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.470855 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.574029 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.574091 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.574101 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.574119 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.574129 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.677490 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.677549 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.677711 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.677737 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.677998 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.781511 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.781562 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.781575 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.781591 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.781605 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.885173 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.885251 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.885322 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.885360 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.885385 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.988767 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.988839 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.988852 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.988876 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:00 crc kubenswrapper[5014]: I1205 10:49:00.988889 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:00Z","lastTransitionTime":"2025-12-05T10:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.092308 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.092360 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.092375 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.092394 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.092408 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:01Z","lastTransitionTime":"2025-12-05T10:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.123219 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/0.log" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.123326 5014 generic.go:334] "Generic (PLEG): container finished" podID="f8198e15-3b7a-4c40-b4b3-63382eba5846" containerID="0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc" exitCode=1 Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.123376 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-424mc" event={"ID":"f8198e15-3b7a-4c40-b4b3-63382eba5846","Type":"ContainerDied","Data":"0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.123981 5014 scope.go:117] "RemoveContainer" containerID="0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.140148 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.159033 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.174466 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.190570 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.194661 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.194726 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.194739 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.194762 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.194776 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:01Z","lastTransitionTime":"2025-12-05T10:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.205825 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.220970 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.232012 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.244528 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.257870 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.271728 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.286434 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.297595 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.297651 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.297665 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.297690 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.297707 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:01Z","lastTransitionTime":"2025-12-05T10:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.314321 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.318372 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.318433 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.318569 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:01 crc kubenswrapper[5014]: E1205 10:49:01.318562 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.318662 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:01 crc kubenswrapper[5014]: E1205 10:49:01.318778 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:01 crc kubenswrapper[5014]: E1205 10:49:01.318853 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:01 crc kubenswrapper[5014]: E1205 10:49:01.318949 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.341295 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"contain
erID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.357210 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.373907 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.393178 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.400930 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.400969 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:01 crc 
kubenswrapper[5014]: I1205 10:49:01.400982 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.401001 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.401014 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:01Z","lastTransitionTime":"2025-12-05T10:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.410023 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:00Z\\\",\\\"message\\\":\\\"2025-12-05T10:48:15+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df\\\\n2025-12-05T10:48:15+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df to /host/opt/cni/bin/\\\\n2025-12-05T10:48:15Z [verbose] multus-daemon started\\\\n2025-12-05T10:48:15Z [verbose] Readiness Indicator file check\\\\n2025-12-05T10:49:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.424670 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:01Z is after 2025-08-24T17:21:41Z" Dec 05 
10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.504390 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.504450 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.504463 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.504489 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.504501 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:01Z","lastTransitionTime":"2025-12-05T10:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.607052 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.607091 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.607101 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.607118 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.607128 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:01Z","lastTransitionTime":"2025-12-05T10:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.735466 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.735505 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.735514 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.735530 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.735541 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:01Z","lastTransitionTime":"2025-12-05T10:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.837896 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.837943 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.837952 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.837969 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.837982 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:01Z","lastTransitionTime":"2025-12-05T10:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.941872 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.941919 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.941932 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.941950 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:01 crc kubenswrapper[5014]: I1205 10:49:01.941962 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:01Z","lastTransitionTime":"2025-12-05T10:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.044444 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.044519 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.044532 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.044575 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.044593 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.129504 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/0.log" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.129577 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-424mc" event={"ID":"f8198e15-3b7a-4c40-b4b3-63382eba5846","Type":"ContainerStarted","Data":"f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd"} Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.146976 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.147011 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.147021 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.147039 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.147052 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.149293 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.162222 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.176598 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.189880 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 
10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.204349 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.217563 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.233246 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.249548 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.249584 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.249611 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.249629 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.249639 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.251829 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.269222 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.284151 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.303717 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.329389 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836ced
f88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.342595 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.353198 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.353244 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.353255 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.353289 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.353301 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.355599 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:00Z\\\",\\\"message\\\":\\\"2025-12-05T10:48:15+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df\\\\n2025-12-05T10:48:15+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df to /host/opt/cni/bin/\\\\n2025-12-05T10:48:15Z [verbose] multus-daemon started\\\\n2025-12-05T10:48:15Z [verbose] Readiness Indicator file check\\\\n2025-12-05T10:49:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:49:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.367372 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 
10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.382566 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.397314 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.412308 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:02Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.456003 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.456386 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.456495 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.456587 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.456667 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.559785 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.560249 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.560405 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.560550 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.560636 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.663570 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.663625 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.663637 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.663656 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.663668 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.766692 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.766970 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.767075 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.767170 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.767249 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.870077 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.870132 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.870150 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.870168 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.870180 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.973330 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.973403 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.973420 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.973447 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:02 crc kubenswrapper[5014]: I1205 10:49:02.973466 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:02Z","lastTransitionTime":"2025-12-05T10:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.076232 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.076295 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.076308 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.076327 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.076340 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:03Z","lastTransitionTime":"2025-12-05T10:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.178660 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.178707 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.178717 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.178741 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.178754 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:03Z","lastTransitionTime":"2025-12-05T10:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.281023 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.281064 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.281076 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.281098 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.281114 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:03Z","lastTransitionTime":"2025-12-05T10:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.317486 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.317582 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 10:49:03 crc kubenswrapper[5014]: E1205 10:49:03.317655 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 10:49:03 crc kubenswrapper[5014]: E1205 10:49:03.317762 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.317858 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 10:49:03 crc kubenswrapper[5014]: E1205 10:49:03.317917 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.318207 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:49:03 crc kubenswrapper[5014]: E1205 10:49:03.318471 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.345172 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c
61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.361286 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.386599 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.388441 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.388479 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.388492 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.388512 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.388526 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:03Z","lastTransitionTime":"2025-12-05T10:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.406886 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.428152 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.445550 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:00Z\\\",\\\"message\\\":\\\"2025-12-05T10:48:15+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df\\\\n2025-12-05T10:48:15+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df to 
/host/opt/cni/bin/\\\\n2025-12-05T10:48:15Z [verbose] multus-daemon started\\\\n2025-12-05T10:48:15Z [verbose] Readiness Indicator file check\\\\n2025-12-05T10:49:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:49:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.460150 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 
10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.477061 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.491374 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.491418 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.491428 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.491448 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.491463 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:03Z","lastTransitionTime":"2025-12-05T10:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.493769 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.505550 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.516677 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.527578 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.541735 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.555260 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.568894 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.583398 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.594244 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:03 
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.594244 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.594346 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.594390 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.594414 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.594429 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:03Z","lastTransitionTime":"2025-12-05T10:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.595252 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.605768 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:03Z is after 2025-08-24T17:21:41Z"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.696567 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.696607 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.696617 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.696635 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.696646 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:03Z","lastTransitionTime":"2025-12-05T10:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
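Every readiness heartbeat in this stretch repeats the same failure: the kubelet finds no CNI configuration under /etc/kubernetes/cni/net.d/. A small Python sketch of the discovery rule libcni applies (hypothetical helper, run on the node itself; libcni accepts .conf, .conflist, and .json files):

from pathlib import Path

# Directory named in the NetworkPluginNotReady message.
CNI_CONF_DIR = Path("/etc/kubernetes/cni/net.d")

# Extensions libcni considers when loading network configs.
EXTENSIONS = {".conf", ".conflist", ".json"}

def cni_configs(conf_dir: Path) -> list[Path]:
    """Return candidate CNI config files in sorted (load) order."""
    if not conf_dir.is_dir():
        return []
    return sorted(p for p in conf_dir.iterdir()
                  if p.is_file() and p.suffix in EXTENSIONS)

configs = cni_configs(CNI_CONF_DIR)
if not configs:
    # This is the state the kubelet is reporting: NetworkReady=false.
    print(f"no CNI configuration file in {CNI_CONF_DIR}/")
else:
    for p in configs:
        print("found:", p)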
Has your network provider started?"} Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.903721 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.903787 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.903799 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.903820 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:03 crc kubenswrapper[5014]: I1205 10:49:03.903834 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:03Z","lastTransitionTime":"2025-12-05T10:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.006582 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.006626 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.006635 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.006655 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.006665 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.109517 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.109559 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.109568 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.109583 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.109593 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.213037 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.213096 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.213111 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.213140 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.213156 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.316524 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.316610 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.316621 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.316642 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.316653 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.419978 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.420023 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.420033 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.420049 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.420058 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.522429 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.522485 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.522496 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.522515 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.522528 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.625082 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.625139 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.625153 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.625173 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.625189 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.728885 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.728957 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.728969 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.728988 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.729002 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.831923 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.831965 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.831976 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.831995 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.832008 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.934429 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.934507 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.934529 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.934558 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:04 crc kubenswrapper[5014]: I1205 10:49:04.934578 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:04Z","lastTransitionTime":"2025-12-05T10:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.037411 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.037460 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.037470 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.037488 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.037501 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.140696 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.140748 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.140765 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.140790 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.140807 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.244479 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.244555 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.244573 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.244602 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.244621 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.317851 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.317939 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.318013 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.318026 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.318153 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.318378 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.318584 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.318666 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.347887 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.347956 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.347971 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.347991 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.348004 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
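The not-ready heartbeats above recur several times per second for as long as the network plugin is down. A throwaway Python sketch (hypothetical helper; reads a kubelet log like this one from stdin) that counts those transitions per second to show how long the condition persisted:

import re
import sys
from collections import Counter

# Matches entries such as:
#   I1205 10:49:04.729002 5014 setters.go:603] "Node became not ready" ...
PATTERN = re.compile(
    r'I(\d{4} \d{2}:\d{2}:\d{2})\.\d+\s+\d+\s+setters\.go:\d+\] "Node became not ready"'
)

counts = Counter()
for line in sys.stdin:
    # finditer tolerates several entries packed onto one physical line.
    for match in PATTERN.finditer(line):
        counts[match.group(1)] += 1

for stamp, n in sorted(counts.items()):
    print(f"{stamp}: {n} not-ready transition(s)")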
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.347887 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.347956 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.347971 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.347991 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.348004 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.450861 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.450941 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.450955 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.450977 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.450991 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.541891 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.541967 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.541981 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.542002 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.542020 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.556233 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:05Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.560097 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.560151 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.560163 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.560184 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.560203 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.573087 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:05Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.576672 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.576752 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.576764 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.576784 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.576802 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.592482 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:05Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.596942 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.596975 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.596984 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.597002 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.597014 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.609994 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:05Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.614249 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.614335 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.614350 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.614375 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.614391 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.628788 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:05Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:05 crc kubenswrapper[5014]: E1205 10:49:05.628978 5014 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.631389 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.631427 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.631438 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.631455 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.631469 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.734906 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.734965 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.734978 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.735001 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.735014 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.838319 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.838397 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.838413 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.838436 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.838451 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.941455 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.941528 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.941546 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.941574 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:05 crc kubenswrapper[5014]: I1205 10:49:05.941593 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:05Z","lastTransitionTime":"2025-12-05T10:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.044303 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.044363 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.044372 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.044388 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.044397 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.146374 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.146420 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.146428 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.146446 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.146457 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.250080 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.250133 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.250147 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.250170 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.250187 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.353253 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.353343 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.353358 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.353379 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.353393 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.457055 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.457135 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.457153 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.457176 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.457194 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.560945 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.561019 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.561034 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.561058 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.561076 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.664890 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.664969 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.664994 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.665024 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.665049 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.769327 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.769378 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.769397 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.769421 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.769437 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.873511 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.873574 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.873592 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.873662 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.873683 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.977226 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.977292 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.977303 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.977322 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:06 crc kubenswrapper[5014]: I1205 10:49:06.977337 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:06Z","lastTransitionTime":"2025-12-05T10:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.081550 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.081608 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.081624 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.081648 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.081664 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:07Z","lastTransitionTime":"2025-12-05T10:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.184850 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.184904 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.184921 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.184971 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.184989 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:07Z","lastTransitionTime":"2025-12-05T10:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.288232 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.288688 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.288810 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.288946 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.289072 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:07Z","lastTransitionTime":"2025-12-05T10:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.318117 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.318185 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.318194 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.318368 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:07 crc kubenswrapper[5014]: E1205 10:49:07.318369 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:07 crc kubenswrapper[5014]: E1205 10:49:07.318547 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:07 crc kubenswrapper[5014]: E1205 10:49:07.318685 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:07 crc kubenswrapper[5014]: E1205 10:49:07.318791 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.392052 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.392120 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.392138 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.392160 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.392176 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:07Z","lastTransitionTime":"2025-12-05T10:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.496245 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.496350 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.496371 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.496396 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.496414 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:07Z","lastTransitionTime":"2025-12-05T10:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.600786 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.600873 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.600895 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.600928 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.600950 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:07Z","lastTransitionTime":"2025-12-05T10:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.708652 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.708702 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.708719 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.708742 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.708764 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:07Z","lastTransitionTime":"2025-12-05T10:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.812419 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.812935 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.813040 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.813168 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.813304 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:07Z","lastTransitionTime":"2025-12-05T10:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.916867 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.916926 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.916942 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.916963 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:07 crc kubenswrapper[5014]: I1205 10:49:07.916976 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:07Z","lastTransitionTime":"2025-12-05T10:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.020669 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.020732 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.020745 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.020768 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.020782 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.123557 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.123646 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.123690 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.123712 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.123725 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.227415 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.227462 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.227471 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.227489 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.227500 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.331941 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.332024 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.332034 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.332051 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.332061 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.434419 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.434483 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.434500 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.434524 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.434544 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.537191 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.537250 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.537265 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.537323 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.537339 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.640773 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.640816 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.640829 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.640846 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.640858 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.745991 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.746376 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.746499 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.746591 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.746669 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.849327 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.849405 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.849422 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.849446 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.849460 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.952470 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.952549 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.952559 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.952579 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:08 crc kubenswrapper[5014]: I1205 10:49:08.952590 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:08Z","lastTransitionTime":"2025-12-05T10:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.055513 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.055588 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.055599 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.055625 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.055639 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.158333 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.158388 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.158398 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.158421 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.158434 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.262353 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.262428 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.262448 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.262483 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.262504 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.317634 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.317713 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.317655 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.317654 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:09 crc kubenswrapper[5014]: E1205 10:49:09.317840 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:09 crc kubenswrapper[5014]: E1205 10:49:09.317963 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:09 crc kubenswrapper[5014]: E1205 10:49:09.318035 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:09 crc kubenswrapper[5014]: E1205 10:49:09.318081 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.365701 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.365753 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.365768 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.365788 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.365802 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.469161 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.469243 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.469261 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.469340 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.469367 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.573160 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.573223 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.573236 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.573258 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.573288 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.676307 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.676355 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.676364 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.676379 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.676389 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.780448 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.780512 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.780530 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.780554 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.780573 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.884401 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.884463 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.884482 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.884508 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.884527 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.987817 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.987869 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.987880 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.987899 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:09 crc kubenswrapper[5014]: I1205 10:49:09.987912 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:09Z","lastTransitionTime":"2025-12-05T10:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.091681 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.091746 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.091758 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.091779 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.091796 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:10Z","lastTransitionTime":"2025-12-05T10:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.194524 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.194597 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.194621 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.194652 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.194679 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:10Z","lastTransitionTime":"2025-12-05T10:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.299085 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.299162 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.299180 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.299207 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.299226 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:10Z","lastTransitionTime":"2025-12-05T10:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.404062 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.404148 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.404173 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.404217 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.404248 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:10Z","lastTransitionTime":"2025-12-05T10:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.507936 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.508020 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.508038 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.508069 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.508087 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:10Z","lastTransitionTime":"2025-12-05T10:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.623030 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.623097 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.623112 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.623135 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.623153 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:10Z","lastTransitionTime":"2025-12-05T10:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.726628 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.726687 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.726698 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.726718 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.726731 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:10Z","lastTransitionTime":"2025-12-05T10:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.831157 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.831250 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.831265 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.831308 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.831323 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:10Z","lastTransitionTime":"2025-12-05T10:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.934016 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.934058 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.934070 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.934088 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:10 crc kubenswrapper[5014]: I1205 10:49:10.934100 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:10Z","lastTransitionTime":"2025-12-05T10:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.037756 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.037816 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.037834 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.037858 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.037877 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.140341 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.140407 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.140427 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.140455 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.140473 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.244724 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.244808 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.244833 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.244872 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.244896 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.317977 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:11 crc kubenswrapper[5014]: E1205 10:49:11.318200 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.318298 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.318369 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:11 crc kubenswrapper[5014]: E1205 10:49:11.318479 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.318546 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.319658 5014 scope.go:117] "RemoveContainer" containerID="6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad" Dec 05 10:49:11 crc kubenswrapper[5014]: E1205 10:49:11.320350 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:11 crc kubenswrapper[5014]: E1205 10:49:11.320605 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.349813 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.349878 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.349889 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.349907 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.349918 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.452612 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.452672 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.452686 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.452708 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.452723 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.556888 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.556958 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.556978 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.557008 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.557031 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.660319 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.660397 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.660414 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.660439 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.660486 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.763948 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.764029 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.764056 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.764091 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.764115 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.868372 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.868473 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.868501 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.868535 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.868561 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.972246 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.972333 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.972345 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.972375 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:11 crc kubenswrapper[5014]: I1205 10:49:11.972389 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:11Z","lastTransitionTime":"2025-12-05T10:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.075830 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.075894 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.075907 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.075928 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.075943 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:12Z","lastTransitionTime":"2025-12-05T10:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.170156 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/2.log" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.173055 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.178217 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.178301 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.178320 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.178340 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.178356 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:12Z","lastTransitionTime":"2025-12-05T10:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.281521 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.281585 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.281603 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.281622 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.281634 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:12Z","lastTransitionTime":"2025-12-05T10:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.385310 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.385384 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.385405 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.385429 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.385443 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:12Z","lastTransitionTime":"2025-12-05T10:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.492534 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.493355 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.493476 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.493632 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.493716 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:12Z","lastTransitionTime":"2025-12-05T10:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.596845 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.596904 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.596916 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.596939 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.596954 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:12Z","lastTransitionTime":"2025-12-05T10:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.700159 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.700235 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.700253 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.700296 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.700313 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:12Z","lastTransitionTime":"2025-12-05T10:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.803832 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.803915 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.803934 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.803962 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.803982 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:12Z","lastTransitionTime":"2025-12-05T10:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.907896 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.907958 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.907971 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.907998 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:12 crc kubenswrapper[5014]: I1205 10:49:12.908011 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:12Z","lastTransitionTime":"2025-12-05T10:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.010512 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.010552 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.010564 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.010583 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.010597 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.113124 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.113170 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.113181 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.113197 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.113209 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.177399 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.197780 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.213060 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.216536 5014 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.216572 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.216587 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.216607 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.216621 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.223177 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.234045 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc 
kubenswrapper[5014]: I1205 10:49:13.249586 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\
\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.265483 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 
10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 
10:49:13.282836 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"
containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.296037 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.316615 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bdda39c5eccb4367e6e52525ad55a330e020b3f
a6033b228b8a756ba9be13e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service 
openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:49:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\
"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.317365 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.317385 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.317401 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.317359 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:13 crc kubenswrapper[5014]: E1205 10:49:13.317545 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:13 crc kubenswrapper[5014]: E1205 10:49:13.317604 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:13 crc kubenswrapper[5014]: E1205 10:49:13.317681 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:13 crc kubenswrapper[5014]: E1205 10:49:13.317744 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.319714 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.319742 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.319842 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.319867 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.319922 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.338642 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.352672 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.367093 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.383144 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.400358 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:00Z\\\",\\\"message\\\":\\\"2025-12-05T10:48:15+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df\\\\n2025-12-05T10:48:15+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df to /host/opt/cni/bin/\\\\n2025-12-05T10:48:15Z [verbose] multus-daemon started\\\\n2025-12-05T10:48:15Z [verbose] Readiness Indicator file check\\\\n2025-12-05T10:49:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:49:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.415387 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 
10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.423545 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.423596 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.423606 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.423625 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.423641 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.430252 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 
10:49:13.442182 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.451985 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.466221 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:00Z\\\",\\\"message\\\":\\\"2025-12-05T10:48:15+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df\\\\n2025-12-05T10:48:15+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df to /host/opt/cni/bin/\\\\n2025-12-05T10:48:15Z [verbose] multus-daemon started\\\\n2025-12-05T10:48:15Z [verbose] Readiness Indicator file check\\\\n2025-12-05T10:49:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:49:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.478231 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 
10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.493503 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.506598 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.519720 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.526134 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.526197 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.526211 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.526238 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.526318 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.534981 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.550056 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.560539 5014 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.572482 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.586957 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.606848 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.621903 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.629539 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.629587 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.629598 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.629617 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.629630 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.637971 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.658009 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service 
openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:49:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\
"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.679604 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.692644 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.705541 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.724071 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:13Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.732403 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.732446 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.732503 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.732529 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.732547 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.835755 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.835812 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.835821 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.835842 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.835854 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.938497 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.938560 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.938576 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.938599 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:13 crc kubenswrapper[5014]: I1205 10:49:13.938616 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:13Z","lastTransitionTime":"2025-12-05T10:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.041154 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.041203 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.041216 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.041235 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.041248 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.143758 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.143843 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.143873 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.143891 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.143902 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.183178 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/3.log" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.184489 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/2.log" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.188775 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5" exitCode=1 Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.188858 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.188939 5014 scope.go:117] "RemoveContainer" containerID="6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.190356 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5" Dec 05 10:49:14 crc kubenswrapper[5014]: E1205 10:49:14.190687 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.212577 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad
26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.226840 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.245925 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.247127 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.247182 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.247198 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.247222 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.247238 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.264670 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.288882 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c5be208747431fcde2902fcb023601e3f7625fef2694dbef9d9d2c86415cbad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:48:43Z\\\",\\\"message\\\":\\\"in network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:48:42Z is after 2025-08-24T17:21:41Z]\\\\nI1205 10:48:42.617861 6639 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/redhat-operators]} name:Service_openshift-marketplace/redhat-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.138:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {97419c58-41c7-41d7-a137-a446f0c7eeb3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 10:48:42.614887 6639 services_controller.go:451] Built service openshift-kube-apiserver-operator/metr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:13Z\\\",\\\"message\\\":\\\"tDefinition (0s) from 
github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 10:49:13.368919 6992 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 10:49:13.368928 6992 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:49:13.368952 6992 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 10:49:13.368973 6992 factory.go:656] Stopping watch factory\\\\nI1205 10:49:13.368990 6992 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:49:13.369504 6992 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 10:49:13.380363 6992 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1205 10:49:13.380395 6992 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1205 10:49:13.380461 6992 ovnkube.go:599] Stopped ovnkube\\\\nI1205 10:49:13.380496 6992 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 10:49:13.380610 6992 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:49:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e
3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.307348 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:00Z\\\",\\\"message\\\":\\\"2025-12-05T10:48:15+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df\\\\n2025-12-05T10:48:15+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df to /host/opt/cni/bin/\\\\n2025-12-05T10:48:15Z [verbose] multus-daemon started\\\\n2025-12-05T10:48:15Z [verbose] Readiness Indicator file check\\\\n2025-12-05T10:49:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:49:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.320680 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 
10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.332508 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.344795 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.349958 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.350012 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.350022 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.350041 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.350051 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.356034 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.368383 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.379625 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.395864 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.408767 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.422393 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.432503 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.447700 5014 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 
10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.452753 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.452807 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.452822 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.452843 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.452857 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.457289 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod
\"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:14Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.555719 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.555772 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.555789 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.555809 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.555820 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.658963 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.659037 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.659052 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.659075 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.659090 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.761964 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.762031 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.762044 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.762067 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.762082 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.865040 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.865125 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.865143 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.865163 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.865184 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.970788 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.970868 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.970886 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.970915 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:14 crc kubenswrapper[5014]: I1205 10:49:14.970936 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:14Z","lastTransitionTime":"2025-12-05T10:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.074784 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.074841 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.074856 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.074879 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.074893 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.178382 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.178457 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.178474 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.178503 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.178523 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.196082 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/3.log"
Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.202174 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"
Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.202598 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c"
Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.221291 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.236260 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.253415 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.271124 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z"
Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.281655 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.281700 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.281737 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.281758 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.281771 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.287195 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.301565 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.314499 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.317637 5014 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.317745 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.317637 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.317802 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.317812 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.317908 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.318231 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.318536 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.328010 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.357324 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.357504 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 10:50:19.357472711 +0000 UTC m=+146.305590425 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.357567 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.357519 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e7869f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12
c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.357642 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.357763 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.357789 5014 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.357851 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:50:19.35783587 +0000 UTC m=+146.305953584 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.357938 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.357979 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.357996 5014 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.358062 5014 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.358129 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 10:50:19.358105436 +0000 UTC m=+146.306223140 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.358154 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 10:50:19.358145488 +0000 UTC m=+146.306263192 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.373693 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.384723 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.384799 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.384823 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.385266 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.385578 5014 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.390341 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.406647 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: 
I1205 10:49:15.429801 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c
\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:13Z\\\",\\\"message\\\":\\\"tDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 10:49:13.368919 6992 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 10:49:13.368928 6992 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:49:13.368952 6992 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 10:49:13.368973 6992 factory.go:656] Stopping watch factory\\\\nI1205 10:49:13.368990 6992 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:49:13.369504 6992 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 10:49:13.380363 6992 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1205 10:49:13.380395 6992 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1205 10:49:13.380461 6992 ovnkube.go:599] Stopped ovnkube\\\\nI1205 10:49:13.380496 6992 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 10:49:13.380610 6992 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:49:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.445775 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:00Z\\\",\\\"message\\\":\\\"2025-12-05T10:48:15+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df\\\\n2025-12-05T10:48:15+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df to 
/host/opt/cni/bin/\\\\n2025-12-05T10:48:15Z [verbose] multus-daemon started\\\\n2025-12-05T10:48:15Z [verbose] Readiness Indicator file check\\\\n2025-12-05T10:49:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:49:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.459468 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.459507 5014 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.459526 5014 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.459590 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 10:50:19.459567774 +0000 UTC m=+146.407685478 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.459502 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.459428 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\
\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.477254 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.490164 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.490236 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.490247 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.490284 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.490299 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.492338 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.505764 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.593947 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.594035 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.594062 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.594097 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.594125 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.637915 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.637976 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.637996 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.638020 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.638035 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.663505 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.670772 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.670836 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.670856 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.670884 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.670907 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.690530 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.696422 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.696486 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.696496 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.696516 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.696528 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.715149 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.720959 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.721039 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.721058 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.721090 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.721108 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.745599 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.751318 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.751430 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.751450 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.751472 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.751485 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.772152 5014 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"553a7abf-1287-4c60-9edc-6cc1ccaed34a\\\",\\\"systemUUID\\\":\\\"85087ce8-dc93-48b4-8df2-1d14cd5a8c8f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:15Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:15 crc kubenswrapper[5014]: E1205 10:49:15.772358 5014 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.775261 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.775326 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.775338 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.775357 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.775372 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.878432 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.878484 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.878494 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.878509 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.878521 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.981533 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.981609 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.981627 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.981656 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:15 crc kubenswrapper[5014]: I1205 10:49:15.981676 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:15Z","lastTransitionTime":"2025-12-05T10:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.086107 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.086183 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.086195 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.086214 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.086226 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:16Z","lastTransitionTime":"2025-12-05T10:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.189573 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.189724 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.189752 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.189786 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.189811 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:16Z","lastTransitionTime":"2025-12-05T10:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.292591 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.293115 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.293130 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.293158 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.293174 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:16Z","lastTransitionTime":"2025-12-05T10:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.396493 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.396560 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.396572 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.396590 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.396603 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:16Z","lastTransitionTime":"2025-12-05T10:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.498946 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.499024 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.499042 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.499069 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.499092 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:16Z","lastTransitionTime":"2025-12-05T10:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.602548 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.602614 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.602634 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.602667 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.602689 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:16Z","lastTransitionTime":"2025-12-05T10:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.705631 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.705710 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.705734 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.705764 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.705787 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:16Z","lastTransitionTime":"2025-12-05T10:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.809230 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.809303 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.809313 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.809330 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.809342 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:16Z","lastTransitionTime":"2025-12-05T10:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.914099 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.914196 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.914215 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.914244 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:16 crc kubenswrapper[5014]: I1205 10:49:16.914309 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:16Z","lastTransitionTime":"2025-12-05T10:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.017636 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.017717 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.017737 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.017765 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.017792 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.121745 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.121800 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.121809 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.121826 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.121836 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.224410 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.224474 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.224490 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.224517 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.224537 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.318395 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.318504 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.318590 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.318417 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:17 crc kubenswrapper[5014]: E1205 10:49:17.318682 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:17 crc kubenswrapper[5014]: E1205 10:49:17.318796 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:17 crc kubenswrapper[5014]: E1205 10:49:17.318908 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:17 crc kubenswrapper[5014]: E1205 10:49:17.318988 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.326967 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.327023 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.327038 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.327063 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.327078 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.429762 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.429807 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.429817 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.429833 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.429844 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.532772 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.532842 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.532851 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.532871 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.532881 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.636986 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.637062 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.637082 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.637110 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.637132 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.740605 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.740663 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.740674 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.740695 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.740709 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.845388 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.845483 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.845501 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.845530 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.845550 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.949397 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.949457 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.949467 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.949487 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:17 crc kubenswrapper[5014]: I1205 10:49:17.949501 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:17Z","lastTransitionTime":"2025-12-05T10:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.053697 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.053781 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.053801 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.053831 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.053852 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.157479 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.157528 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.157541 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.157567 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.157582 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.260887 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.260938 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.260973 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.260993 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.261005 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.364966 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.365057 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.365075 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.365102 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.365124 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.468883 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.468965 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.468989 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.469021 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.469043 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.573200 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.573321 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.573348 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.573377 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.573403 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.677537 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.677631 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.677653 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.677682 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.677705 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.780231 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.780385 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.780403 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.780454 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.780470 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.884004 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.884075 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.884093 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.884344 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.884365 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.987829 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.987910 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.987936 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.987977 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:18 crc kubenswrapper[5014]: I1205 10:49:18.988004 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:18Z","lastTransitionTime":"2025-12-05T10:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.091598 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.091667 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.091706 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.091743 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.091766 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:19Z","lastTransitionTime":"2025-12-05T10:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.194818 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.194886 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.194905 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.194932 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.194951 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:19Z","lastTransitionTime":"2025-12-05T10:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.298314 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.298392 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.298411 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.298437 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.298454 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:19Z","lastTransitionTime":"2025-12-05T10:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
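[editor's note: every entry in this stretch traces back to the kubelet finding no CNI network config under /etc/kubernetes/cni/net.d/. For orientation only, a minimal CNI conflist of the general shape the kubelet/CRI-O expects in a net.d directory is sketched below; the file name "10-example.conflist" and the bridge/host-local values are illustrative assumptions, not taken from this system. On this CRC/OpenShift node the real config is written by the cluster network plugin (Multus/ovn-kubernetes) once it starts, which is what the repeated "Has your network provider started?" hint refers to.]

    10-example.conflist (hypothetical illustration, not from this log):
    {
      "cniVersion": "0.4.0",
      "name": "example-net",
      "plugins": [
        {
          "type": "bridge",
          "bridge": "cni0",
          "isGateway": true,
          "ipMasq": true,
          "ipam": {
            "type": "host-local",
            "subnet": "10.88.0.0/16"
          }
        }
      ]
    }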
Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.317637 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.317716 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.317666 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:49:19 crc kubenswrapper[5014]: I1205 10:49:19.317659 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 10:49:19 crc kubenswrapper[5014]: E1205 10:49:19.317822 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 10:49:19 crc kubenswrapper[5014]: E1205 10:49:19.318004 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 10:49:19 crc kubenswrapper[5014]: E1205 10:49:19.318029 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 10:49:19 crc kubenswrapper[5014]: E1205 10:49:19.318126 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56"
[editor's note: the NodeNotReady status cycle resumes at ~100 ms intervals from 10:49:19.401 through 10:49:21.265 and is elided here.]
Dec 05 10:49:21 crc kubenswrapper[5014]: I1205 10:49:21.317958 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 10:49:21 crc kubenswrapper[5014]: I1205 10:49:21.318039 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 10:49:21 crc kubenswrapper[5014]: I1205 10:49:21.318054 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 10:49:21 crc kubenswrapper[5014]: I1205 10:49:21.318241 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:49:21 crc kubenswrapper[5014]: E1205 10:49:21.318219 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 10:49:21 crc kubenswrapper[5014]: E1205 10:49:21.318461 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 10:49:21 crc kubenswrapper[5014]: E1205 10:49:21.318590 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56"
Dec 05 10:49:21 crc kubenswrapper[5014]: E1205 10:49:21.318958 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[editor's note: the NodeNotReady status cycle resumes at ~100 ms intervals from 10:49:21.369 through 10:49:23.130 and is elided here.]
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.233078 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.233125 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.233135 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.233152 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.233165 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:23Z","lastTransitionTime":"2025-12-05T10:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.317432 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.317616 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.317658 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:23 crc kubenswrapper[5014]: E1205 10:49:23.317791 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.320704 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:23 crc kubenswrapper[5014]: E1205 10:49:23.321012 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:23 crc kubenswrapper[5014]: E1205 10:49:23.321500 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:23 crc kubenswrapper[5014]: E1205 10:49:23.321743 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.332499 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.337745 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.337814 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.337828 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.337851 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.337896 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:23Z","lastTransitionTime":"2025-12-05T10:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.339310 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40758748-4038-4c7b-bd23-63dd56a8ecc4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://621eed095a29d65debb305ac9c0a7c8e823a663fba84d8d1e29252e8f7e9b8f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eef6e9376e786
9f790836cedf88f0f41c7fb1c4737770f37355490ec9cc5592a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab76cfa9ef392d779fa34383f9a27eed8ed7d31a14ec7e9ac19083317e7bbf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59847a6d1ab562645390a2ea392f13b415db7ad26cef032a5a516d08a57af657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eec62807b574f3121dc4e08344a68c07e527d1634e8c11e3046a25da71af900b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"ima
ge\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f500a12c636d7c6809a1b7bbe1e2c634efe7ee303d9ac052c7dc61b8651547cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75535493ff31823b1216f0eadbd1ce2fc436e0c61a4f9812a74a471ff5617a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ee89212d9efcb51de939be304bd82614ff4195df2e7e6fc95e8a1b776c2af17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.353932 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.368326 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65c2efcf875c5ebee7f2d9fba4a2135ebcff81f614d90516ddc2b81ec1573c54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9725568a12a13de29143d454d93eea34b57966658b8fcf0be24f25d4d5eeb97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.383578 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc769555-0222-432b-bc44-d0d75873d48c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28c96e64749c18f09f81dccae530657de6bbd55e89cea4636bd566cdc2cb77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e908f34217c0fd6b60c18324f05ff152bb8b4c0a8be014afa977acebb44ec13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ff4a88b5001856de71db0be26dca02f6cba6ae90ba7e8e4d39c211ac2dade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d7d1f44e5d978d162701e47115ae15f3ce7922eef3e6cd56b741131926319f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd098f014862a2a134f7dd73f5b09d982cc37596d8240a3cbdb673278585c782\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ab47c44263361f78c6d653324b634f63e55febc151ea28dc5755c0cd1a01fad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9d2f0d44f8188c8315134114560edf35a4bf2f654e583fdbf577cc1d1ca1867\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sqmht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lkk2g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.403550 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41fb1a99-1c51-4281-b73f-8a29357a0a2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:13Z\\\",\\\"message\\\":\\\"tDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 10:49:13.368919 6992 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 10:49:13.368928 6992 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 10:49:13.368952 6992 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 10:49:13.368973 6992 factory.go:656] Stopping watch factory\\\\nI1205 10:49:13.368990 6992 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 10:49:13.369504 6992 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 10:49:13.380363 6992 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1205 10:49:13.380395 6992 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1205 10:49:13.380461 6992 ovnkube.go:599] Stopped ovnkube\\\\nI1205 10:49:13.380496 6992 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 10:49:13.380610 6992 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:49:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gqw9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-znfbl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.418446 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-424mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f8198e15-3b7a-4c40-b4b3-63382eba5846\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:49:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T10:49:00Z\\\",\\\"message\\\":\\\"2025-12-05T10:48:15+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df\\\\n2025-12-05T10:48:15+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ffebdea3-951d-4e36-9b16-6506a67e62df to 
/host/opt/cni/bin/\\\\n2025-12-05T10:48:15Z [verbose] multus-daemon started\\\\n2025-12-05T10:48:15Z [verbose] Readiness Indicator file check\\\\n2025-12-05T10:49:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:49:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xw77t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-424mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.432125 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7e002dc5-a637-47bf-a201-4117a4fff39b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06b4477d9c74658ab4cdcd7bb98e7a4128b6f2f33555b0f8f566bbfe6fd05d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2f5fe808c4981e324a48d417cdc34c0c918f58f3d6250f0a5ec30fe62811aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqwrp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-w4trt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 
10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.440840 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.440884 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.440898 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.440920 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.440933 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:23Z","lastTransitionTime":"2025-12-05T10:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.447107 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29258dda3e1ddbd67f9084f594ac990a3048b6d3b19373c4370980f881007d79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 
10:49:23.460781 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.472055 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-h6gwn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7894914-db6a-40a3-b46a-bf9e3a6b7fad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7680a476dd8b28dcdb1e25d6fdbedf636ddccacc7616f9a52684e501c8944493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l445d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-h6gwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.485412 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sk4qz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ba85bb5-ce90-4f2d-bbbc-103d4c3fb285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60d9dae7a40f7f5de6f02a4221bd0d3d36d92db268c95f0f8bd2a12e988f8c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvxsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sk4qz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.495423 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97abc013-62da-459c-b7ec-2a78304dcc56\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zvsr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vrt2x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.506191 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a56626-38db-48e9-890b-6fb4e3f18361\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4306a1aeee0b71dae0f9780dc5255bb4efbc68e8d43fdc2bdcf53ed63e36a53f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96cd62fd8608fa04a177db714b1de8df0453179c531bd7455c9d86ae7723b213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c30b7bb83b59a1be95c11da0eb868c792f866a03ea984152a526dc654b39b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0d149e7f72a225c32ef72a93c04a2d7e48708698ed6665a2590040609aca9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.518609 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"140b5f3a-5162-4b57-9e19-5701294f91c9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"t denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 10:48:11.240014 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 10:48:11.240041 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240049 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 10:48:11.240054 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 10:48:11.240057 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 10:48:11.240060 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 10:48:11.240063 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 10:48:11.240069 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 10:48:11.243984 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3172430146/tls.crt::/tmp/serving-cert-3172430146/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764931675\\\\\\\\\\\\\\\" (2025-12-05 10:47:54 +0000 UTC to 2026-01-04 10:47:55 +0000 UTC (now=2025-12-05 10:48:11.243921051 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244102 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764931691\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764931690\\\\\\\\\\\\\\\" (2025-12-05 09:48:10 +0000 UTC to 2026-12-05 09:48:10 +0000 UTC (now=2025-12-05 10:48:11.244082264 +0000 UTC))\\\\\\\"\\\\nI1205 10:48:11.244123 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1205 10:48:11.244146 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1205 10:48:11.244228 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T10:47:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.531013 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16b90fba-8a65-47e2-9124-9efb51955ba3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:47:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bbf96f9faa7c4edfadf1b284933e3812b7a623580d485fb212fa5842e27e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228dbe7100f0d7bd965d806ee2298ec4c09cf3a64cf8ad5d51c153741ad58cb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64215510769e6ccc2b5aa3325a41b0d79b9ec975ecada9d407aa8d9a87bdab6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:47:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:47:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.544034 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.544077 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.544088 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.544109 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.544122 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:23Z","lastTransitionTime":"2025-12-05T10:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.546864 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.560964 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34a335e9295147216f6c184c70c9aae9f89d066dddb0eea5c8a2a288b04798a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.574361 5014 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c07b133-0b3c-4d10-95f9-23167e184681\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T10:48:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01149ef0bc74eff9432ee402866f27aa3c86aa7cd73d3eb07794ef3cd17767cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T10:48:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wg4sq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T10:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cvtv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T10:49:23Z is after 2025-08-24T17:21:41Z" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.648187 5014 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.648239 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.648281 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.648302 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.648315 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:23Z","lastTransitionTime":"2025-12-05T10:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.751675 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.751794 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.751819 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.751849 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.751867 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:23Z","lastTransitionTime":"2025-12-05T10:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.855403 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.855442 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.855452 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.855468 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.855481 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:23Z","lastTransitionTime":"2025-12-05T10:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.958614 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.958688 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.958704 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.958729 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:23 crc kubenswrapper[5014]: I1205 10:49:23.958750 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:23Z","lastTransitionTime":"2025-12-05T10:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.062531 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.062610 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.062628 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.062654 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.062673 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:24Z","lastTransitionTime":"2025-12-05T10:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.165789 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.165848 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.165862 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.165881 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.165895 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:24Z","lastTransitionTime":"2025-12-05T10:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.269008 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.269057 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.269070 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.269090 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.269103 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:24Z","lastTransitionTime":"2025-12-05T10:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.372522 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.372622 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.372636 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.372670 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.372687 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:24Z","lastTransitionTime":"2025-12-05T10:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.475847 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.475926 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.475945 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.475973 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.475992 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:24Z","lastTransitionTime":"2025-12-05T10:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.586182 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.586254 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.586303 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.586333 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.586352 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:24Z","lastTransitionTime":"2025-12-05T10:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.689969 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.690051 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.690071 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.690094 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.690110 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:24Z","lastTransitionTime":"2025-12-05T10:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.793315 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.793369 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.793384 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.793402 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.793412 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:24Z","lastTransitionTime":"2025-12-05T10:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.897333 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.897388 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.897399 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.897614 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:24 crc kubenswrapper[5014]: I1205 10:49:24.897627 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:24Z","lastTransitionTime":"2025-12-05T10:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.001529 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.001625 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.001662 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.001695 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.001717 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.105447 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.105506 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.105519 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.105540 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.105556 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.208811 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.208869 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.208888 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.208910 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.208923 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.311794 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.311850 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.311866 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.311887 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.311903 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.317180 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.317315 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.317321 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.317335 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:25 crc kubenswrapper[5014]: E1205 10:49:25.317467 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:25 crc kubenswrapper[5014]: E1205 10:49:25.317593 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:25 crc kubenswrapper[5014]: E1205 10:49:25.317692 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:25 crc kubenswrapper[5014]: E1205 10:49:25.317987 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.415253 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.415416 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.415437 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.415507 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.415533 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.518958 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.519003 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.519013 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.519034 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.519055 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.622886 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.622964 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.622988 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.623014 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.623035 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.727360 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.727433 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.727456 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.727507 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.727530 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.831611 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.831667 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.831677 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.831694 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.831708 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.934471 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.934545 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.934570 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.934605 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:25 crc kubenswrapper[5014]: I1205 10:49:25.934629 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:25Z","lastTransitionTime":"2025-12-05T10:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.001700 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.001771 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.001796 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.001854 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.001876 5014 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T10:49:26Z","lastTransitionTime":"2025-12-05T10:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.054448 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m"] Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.054816 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.056471 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.058383 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.058560 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.058691 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.083033 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.083111 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.083203 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-service-ca\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.083237 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.083646 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.095904 5014 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=3.095879552 podStartE2EDuration="3.095879552s" podCreationTimestamp="2025-12-05 10:49:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.074698747 +0000 UTC m=+93.022816451" watchObservedRunningTime="2025-12-05 10:49:26.095879552 +0000 UTC m=+93.043997276" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.129700 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-h6gwn" podStartSLOduration=76.129672487 podStartE2EDuration="1m16.129672487s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.129302158 +0000 UTC m=+93.077419882" watchObservedRunningTime="2025-12-05 10:49:26.129672487 +0000 UTC m=+93.077790191" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.184315 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.184405 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.184446 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.184466 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.184499 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-service-ca\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.184627 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.184719 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.185567 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-service-ca\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.192725 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.198988 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=38.198962518 podStartE2EDuration="38.198962518s" podCreationTimestamp="2025-12-05 10:48:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.172747113 +0000 UTC m=+93.120864817" watchObservedRunningTime="2025-12-05 10:49:26.198962518 +0000 UTC m=+93.147080232" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.203223 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca8b19e1-8388-4f72-a1f4-70eec5d51d08-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-m452m\" (UID: \"ca8b19e1-8388-4f72-a1f4-70eec5d51d08\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.219720 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=75.219689502 podStartE2EDuration="1m15.219689502s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.202970804 +0000 UTC m=+93.151088518" watchObservedRunningTime="2025-12-05 10:49:26.219689502 +0000 UTC m=+93.167807206" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.241704 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=75.241674487 podStartE2EDuration="1m15.241674487s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.220253265 +0000 UTC m=+93.168371009" watchObservedRunningTime="2025-12-05 10:49:26.241674487 +0000 UTC m=+93.189792191" Dec 05 10:49:26 crc 
kubenswrapper[5014]: I1205 10:49:26.280920 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podStartSLOduration=76.280895001 podStartE2EDuration="1m16.280895001s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.280756648 +0000 UTC m=+93.228874372" watchObservedRunningTime="2025-12-05 10:49:26.280895001 +0000 UTC m=+93.229012705" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.293435 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-sk4qz" podStartSLOduration=76.29340921 podStartE2EDuration="1m16.29340921s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.292536818 +0000 UTC m=+93.240654522" watchObservedRunningTime="2025-12-05 10:49:26.29340921 +0000 UTC m=+93.241526904" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.320081 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=71.320056525 podStartE2EDuration="1m11.320056525s" podCreationTimestamp="2025-12-05 10:48:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.319498551 +0000 UTC m=+93.267616275" watchObservedRunningTime="2025-12-05 10:49:26.320056525 +0000 UTC m=+93.268174229" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.371311 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.401504 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-lkk2g" podStartSLOduration=75.401477925 podStartE2EDuration="1m15.401477925s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.36812629 +0000 UTC m=+93.316244014" watchObservedRunningTime="2025-12-05 10:49:26.401477925 +0000 UTC m=+93.349595639" Dec 05 10:49:26 crc kubenswrapper[5014]: I1205 10:49:26.449968 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-424mc" podStartSLOduration=75.449937129 podStartE2EDuration="1m15.449937129s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.431252814 +0000 UTC m=+93.379370518" watchObservedRunningTime="2025-12-05 10:49:26.449937129 +0000 UTC m=+93.398054833" Dec 05 10:49:27 crc kubenswrapper[5014]: I1205 10:49:27.249573 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" event={"ID":"ca8b19e1-8388-4f72-a1f4-70eec5d51d08","Type":"ContainerStarted","Data":"82bb7d09833828e062fa653dcce419365f152182755f1dca7980c41cad2ef8d6"} Dec 05 10:49:27 crc kubenswrapper[5014]: I1205 10:49:27.250765 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" event={"ID":"ca8b19e1-8388-4f72-a1f4-70eec5d51d08","Type":"ContainerStarted","Data":"717162419ecb167ef1345930ab609f52570a33b14520a0b64a6eb2e364b16365"} Dec 05 10:49:27 crc kubenswrapper[5014]: I1205 10:49:27.275150 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-w4trt" podStartSLOduration=76.275115685 podStartE2EDuration="1m16.275115685s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:26.450692188 +0000 UTC m=+93.398809892" watchObservedRunningTime="2025-12-05 10:49:27.275115685 +0000 UTC m=+94.223233429" Dec 05 10:49:27 crc kubenswrapper[5014]: I1205 10:49:27.275367 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m452m" podStartSLOduration=77.27535981 podStartE2EDuration="1m17.27535981s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:27.274624822 +0000 UTC m=+94.222742586" watchObservedRunningTime="2025-12-05 10:49:27.27535981 +0000 UTC m=+94.223477554" Dec 05 10:49:27 crc kubenswrapper[5014]: I1205 10:49:27.317894 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:27 crc kubenswrapper[5014]: I1205 10:49:27.318018 5014 util.go:30] "No sandbox for pod can be found. 
[... the four-pod "No sandbox for pod can be found" / "Error syncing pod, skipping" burst shown at 10:49:25 recurs roughly every 2 s from 10:49:27 through 10:49:47 for network-check-source-55646444c4-trplf, networking-console-plugin-85b44fc459-gdk6g, network-check-target-xd92c, and network-metrics-daemon-vrt2x; duplicates trimmed, interleaved non-duplicate entries kept below ...]
Dec 05 10:49:29 crc kubenswrapper[5014]: I1205 10:49:29.319260 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"
Dec 05 10:49:29 crc kubenswrapper[5014]: E1205 10:49:29.319539 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c"
Dec 05 10:49:29 crc kubenswrapper[5014]: I1205 10:49:29.828153 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x"
Dec 05 10:49:29 crc kubenswrapper[5014]: E1205 10:49:29.828463 5014 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 10:49:29 crc kubenswrapper[5014]: E1205 10:49:29.828618 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs podName:97abc013-62da-459c-b7ec-2a78304dcc56 nodeName:}" failed. No retries permitted until 2025-12-05 10:50:33.828584167 +0000 UTC m=+160.776701901 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs") pod "network-metrics-daemon-vrt2x" (UID: "97abc013-62da-459c-b7ec-2a78304dcc56") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 10:49:44 crc kubenswrapper[5014]: I1205 10:49:44.318589 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"
Dec 05 10:49:44 crc kubenswrapper[5014]: E1205 10:49:44.318952 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-znfbl_openshift-ovn-kubernetes(41fb1a99-1c51-4281-b73f-8a29357a0a2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c"
Dec 05 10:49:47 crc kubenswrapper[5014]: I1205 10:49:47.318550 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:47 crc kubenswrapper[5014]: E1205 10:49:47.318541 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:47 crc kubenswrapper[5014]: I1205 10:49:47.318749 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:47 crc kubenswrapper[5014]: E1205 10:49:47.318721 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:47 crc kubenswrapper[5014]: E1205 10:49:47.318937 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:47 crc kubenswrapper[5014]: I1205 10:49:47.340308 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/1.log" Dec 05 10:49:47 crc kubenswrapper[5014]: I1205 10:49:47.341033 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/0.log" Dec 05 10:49:47 crc kubenswrapper[5014]: I1205 10:49:47.341111 5014 generic.go:334] "Generic (PLEG): container finished" podID="f8198e15-3b7a-4c40-b4b3-63382eba5846" containerID="f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd" exitCode=1 Dec 05 10:49:47 crc kubenswrapper[5014]: I1205 10:49:47.341170 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-424mc" event={"ID":"f8198e15-3b7a-4c40-b4b3-63382eba5846","Type":"ContainerDied","Data":"f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd"} Dec 05 10:49:47 crc kubenswrapper[5014]: I1205 10:49:47.341243 5014 scope.go:117] "RemoveContainer" containerID="0e6a02014ea9eea9752aca86d564e586c7abe71cd9f69e00f8d0392b71c829fc" Dec 05 10:49:47 crc kubenswrapper[5014]: I1205 10:49:47.342036 5014 scope.go:117] "RemoveContainer" containerID="f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd" Dec 05 10:49:47 crc kubenswrapper[5014]: E1205 10:49:47.342441 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-424mc_openshift-multus(f8198e15-3b7a-4c40-b4b3-63382eba5846)\"" pod="openshift-multus/multus-424mc" podUID="f8198e15-3b7a-4c40-b4b3-63382eba5846" Dec 05 10:49:48 crc kubenswrapper[5014]: I1205 10:49:48.347706 
5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/1.log" Dec 05 10:49:49 crc kubenswrapper[5014]: I1205 10:49:49.317360 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:49 crc kubenswrapper[5014]: I1205 10:49:49.317461 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:49 crc kubenswrapper[5014]: E1205 10:49:49.317560 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:49 crc kubenswrapper[5014]: E1205 10:49:49.317691 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:49 crc kubenswrapper[5014]: I1205 10:49:49.317720 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:49 crc kubenswrapper[5014]: I1205 10:49:49.317476 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:49 crc kubenswrapper[5014]: E1205 10:49:49.317833 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:49 crc kubenswrapper[5014]: E1205 10:49:49.317932 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:51 crc kubenswrapper[5014]: I1205 10:49:51.318166 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:51 crc kubenswrapper[5014]: I1205 10:49:51.318260 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:51 crc kubenswrapper[5014]: I1205 10:49:51.318434 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:51 crc kubenswrapper[5014]: E1205 10:49:51.318436 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:51 crc kubenswrapper[5014]: I1205 10:49:51.318468 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:51 crc kubenswrapper[5014]: E1205 10:49:51.318604 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:51 crc kubenswrapper[5014]: E1205 10:49:51.318708 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:51 crc kubenswrapper[5014]: E1205 10:49:51.318860 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:53 crc kubenswrapper[5014]: E1205 10:49:53.269997 5014 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 10:49:53 crc kubenswrapper[5014]: I1205 10:49:53.317533 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:53 crc kubenswrapper[5014]: I1205 10:49:53.317607 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:53 crc kubenswrapper[5014]: E1205 10:49:53.318878 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:53 crc kubenswrapper[5014]: I1205 10:49:53.318894 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:53 crc kubenswrapper[5014]: I1205 10:49:53.318996 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:53 crc kubenswrapper[5014]: E1205 10:49:53.319095 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:53 crc kubenswrapper[5014]: E1205 10:49:53.319222 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:53 crc kubenswrapper[5014]: E1205 10:49:53.319291 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:53 crc kubenswrapper[5014]: E1205 10:49:53.409072 5014 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 10:49:55 crc kubenswrapper[5014]: I1205 10:49:55.317824 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:55 crc kubenswrapper[5014]: E1205 10:49:55.318108 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:55 crc kubenswrapper[5014]: I1205 10:49:55.318695 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:55 crc kubenswrapper[5014]: E1205 10:49:55.318845 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:55 crc kubenswrapper[5014]: I1205 10:49:55.319141 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:55 crc kubenswrapper[5014]: E1205 10:49:55.319320 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:55 crc kubenswrapper[5014]: I1205 10:49:55.319698 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:55 crc kubenswrapper[5014]: E1205 10:49:55.319932 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:56 crc kubenswrapper[5014]: I1205 10:49:56.318494 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5" Dec 05 10:49:57 crc kubenswrapper[5014]: I1205 10:49:57.178451 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-vrt2x"] Dec 05 10:49:57 crc kubenswrapper[5014]: I1205 10:49:57.179063 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:57 crc kubenswrapper[5014]: E1205 10:49:57.179204 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:57 crc kubenswrapper[5014]: I1205 10:49:57.317615 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:57 crc kubenswrapper[5014]: I1205 10:49:57.317656 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:57 crc kubenswrapper[5014]: I1205 10:49:57.317656 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:57 crc kubenswrapper[5014]: E1205 10:49:57.317794 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:57 crc kubenswrapper[5014]: E1205 10:49:57.317931 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:57 crc kubenswrapper[5014]: E1205 10:49:57.317973 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:57 crc kubenswrapper[5014]: I1205 10:49:57.384898 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/3.log" Dec 05 10:49:57 crc kubenswrapper[5014]: I1205 10:49:57.389638 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerStarted","Data":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} Dec 05 10:49:57 crc kubenswrapper[5014]: I1205 10:49:57.390107 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:49:57 crc kubenswrapper[5014]: I1205 10:49:57.422782 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podStartSLOduration=106.422740909 podStartE2EDuration="1m46.422740909s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:49:57.421240563 +0000 UTC m=+124.369358287" watchObservedRunningTime="2025-12-05 10:49:57.422740909 +0000 UTC m=+124.370858633" Dec 05 10:49:58 crc kubenswrapper[5014]: E1205 10:49:58.410623 5014 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 10:49:59 crc kubenswrapper[5014]: I1205 10:49:59.317794 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:49:59 crc kubenswrapper[5014]: I1205 10:49:59.317896 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:49:59 crc kubenswrapper[5014]: I1205 10:49:59.318066 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:49:59 crc kubenswrapper[5014]: I1205 10:49:59.318083 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:49:59 crc kubenswrapper[5014]: E1205 10:49:59.318474 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:49:59 crc kubenswrapper[5014]: E1205 10:49:59.319035 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:49:59 crc kubenswrapper[5014]: E1205 10:49:59.319417 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:49:59 crc kubenswrapper[5014]: E1205 10:49:59.319864 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:49:59 crc kubenswrapper[5014]: I1205 10:49:59.320165 5014 scope.go:117] "RemoveContainer" containerID="f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd" Dec 05 10:50:00 crc kubenswrapper[5014]: I1205 10:50:00.408225 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/1.log" Dec 05 10:50:00 crc kubenswrapper[5014]: I1205 10:50:00.408831 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-424mc" event={"ID":"f8198e15-3b7a-4c40-b4b3-63382eba5846","Type":"ContainerStarted","Data":"9573c4413ea9c82de910e5cc02c6dfd72517d90499efed04a40ee2df63b5cce8"} Dec 05 10:50:01 crc kubenswrapper[5014]: I1205 10:50:01.317885 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:50:01 crc kubenswrapper[5014]: I1205 10:50:01.317964 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:50:01 crc kubenswrapper[5014]: E1205 10:50:01.318123 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:50:01 crc kubenswrapper[5014]: I1205 10:50:01.318234 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:50:01 crc kubenswrapper[5014]: I1205 10:50:01.318461 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:50:01 crc kubenswrapper[5014]: E1205 10:50:01.318464 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:50:01 crc kubenswrapper[5014]: E1205 10:50:01.318572 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:50:01 crc kubenswrapper[5014]: E1205 10:50:01.318734 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:50:03 crc kubenswrapper[5014]: I1205 10:50:03.317438 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:50:03 crc kubenswrapper[5014]: I1205 10:50:03.317501 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:50:03 crc kubenswrapper[5014]: I1205 10:50:03.317506 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:50:03 crc kubenswrapper[5014]: I1205 10:50:03.317656 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:50:03 crc kubenswrapper[5014]: E1205 10:50:03.319472 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 10:50:03 crc kubenswrapper[5014]: E1205 10:50:03.319594 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 10:50:03 crc kubenswrapper[5014]: E1205 10:50:03.319673 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 10:50:03 crc kubenswrapper[5014]: E1205 10:50:03.319771 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vrt2x" podUID="97abc013-62da-459c-b7ec-2a78304dcc56" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.317426 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.317441 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.317603 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.317643 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.321158 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.321158 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.321165 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.321517 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.321664 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 10:50:05 crc kubenswrapper[5014]: I1205 10:50:05.322252 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.381672 5014 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.430442 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g5c8p"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.431191 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.431422 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-djp4n"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.431686 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.431738 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.432393 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.432462 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.433652 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.433965 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.434023 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.438635 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.454857 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.455113 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.455306 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.459778 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.462887 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.463132 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.463458 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.463512 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.463574 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.463675 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.463741 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.463800 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.463936 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.463975 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.464168 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.464231 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.464898 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.465040 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 10:50:06 crc 
kubenswrapper[5014]: I1205 10:50:06.465213 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.465420 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.465547 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.465725 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.465838 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.466021 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.466250 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.469604 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.474003 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.469742 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.469783 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.469814 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.469947 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.474958 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.475189 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.480337 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.482487 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.482736 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.484532 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 10:50:06 crc 
kubenswrapper[5014]: I1205 10:50:06.484758 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.489337 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.489548 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.491129 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-bzvs8"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.491355 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.491488 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.492249 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.492484 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.492823 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.494908 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.495673 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.496467 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rstcd"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.496909 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.498010 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.498244 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.504008 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.504093 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.504341 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.504415 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.504470 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.504524 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.504749 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.505232 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.514483 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-image-import-ca\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.514588 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5c620bea-8aad-4e50-8088-68e259884c27-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.514654 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e8af4e41-2e1b-4164-abc3-587460928aab-node-pullsecrets\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.514706 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8af4e41-2e1b-4164-abc3-587460928aab-serving-cert\") pod \"apiserver-76f77b778f-g5c8p\" (UID: 
\"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.514800 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-config\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.514849 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.514891 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17e4e5df-7027-456d-be2d-b412f4a379ea-serving-cert\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.514937 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-audit\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.514989 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsqw2\" (UniqueName: \"kubernetes.io/projected/e8af4e41-2e1b-4164-abc3-587460928aab-kube-api-access-wsqw2\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515070 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17e4e5df-7027-456d-be2d-b412f4a379ea-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515128 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-client-ca\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515176 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zfzv\" (UniqueName: \"kubernetes.io/projected/61134bd8-7840-4ba7-8ec8-02e41ed425cb-kube-api-access-5zfzv\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515236 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7c810fdc-b085-41e9-9f84-d09b8b28d809-auth-proxy-config\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515313 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1714e18-7b9d-435d-9602-caf20c5a43da-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jtgzb\" (UID: \"a1714e18-7b9d-435d-9602-caf20c5a43da\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515370 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdjlw\" (UniqueName: \"kubernetes.io/projected/7c810fdc-b085-41e9-9f84-d09b8b28d809-kube-api-access-bdjlw\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515433 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk9kb\" (UniqueName: \"kubernetes.io/projected/17e4e5df-7027-456d-be2d-b412f4a379ea-kube-api-access-pk9kb\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515502 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7c810fdc-b085-41e9-9f84-d09b8b28d809-machine-approver-tls\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515583 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5c620bea-8aad-4e50-8088-68e259884c27-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515638 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hfht\" (UniqueName: \"kubernetes.io/projected/5c620bea-8aad-4e50-8088-68e259884c27-kube-api-access-5hfht\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515694 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/5c620bea-8aad-4e50-8088-68e259884c27-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515741 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-etcd-serving-ca\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515784 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e8af4e41-2e1b-4164-abc3-587460928aab-audit-dir\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515866 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c810fdc-b085-41e9-9f84-d09b8b28d809-config\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515911 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17e4e5df-7027-456d-be2d-b412f4a379ea-service-ca-bundle\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515959 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e4e5df-7027-456d-be2d-b412f4a379ea-config\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516007 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-config\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516058 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk655\" (UniqueName: \"kubernetes.io/projected/a1714e18-7b9d-435d-9602-caf20c5a43da-kube-api-access-pk655\") pod \"openshift-apiserver-operator-796bbdcf4f-jtgzb\" (UID: \"a1714e18-7b9d-435d-9602-caf20c5a43da\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516099 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/e8af4e41-2e1b-4164-abc3-587460928aab-etcd-client\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516154 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e8af4e41-2e1b-4164-abc3-587460928aab-encryption-config\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516229 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61134bd8-7840-4ba7-8ec8-02e41ed425cb-serving-cert\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516306 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1714e18-7b9d-435d-9602-caf20c5a43da-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jtgzb\" (UID: \"a1714e18-7b9d-435d-9602-caf20c5a43da\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.515313 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516765 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516928 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.517180 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516396 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.517463 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.517962 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.517984 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.518827 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.518962 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.518988 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.519074 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.519142 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.516474 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.519610 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.519663 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.519750 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.520245 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.520808 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.521041 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.522086 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.523361 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.523564 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.528766 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.528824 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.529969 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.530783 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.550084 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.550341 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.550939 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.551414 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.552295 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.552288 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5wnsv"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.553142 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-vf2bj"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.553205 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.553144 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.553329 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.553613 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.553650 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.553930 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.554056 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-vf2bj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.554063 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.554374 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.554498 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.554513 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.553197 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.554603 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.553251 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.554794 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.554869 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.556189 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.556731 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2s2hb"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.557664 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-b2znf"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.558263 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q4jjv"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.558364 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.558676 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.558930 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.559460 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.560095 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.560942 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.561178 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.561390 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7x7jf"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.562296 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.562845 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.563223 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.563698 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.565391 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.565428 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.565595 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.565853 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.565940 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.566035 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.566426 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.567523 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.568034 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.569097 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.569308 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.569641 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.570375 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.570990 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-5qwb7"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.571763 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.572961 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.573468 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jjj6x"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.574127 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.574161 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.574360 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.574892 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.574949 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.576667 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.578596 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.578994 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.594444 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.602084 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.602132 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2mtlm"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.603244 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.603671 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.606365 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.607163 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.607570 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.609135 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.614083 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-djp4n"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.616215 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-k6tg9"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.617540 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.617701 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.618575 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-client-ca\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.618626 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zfzv\" (UniqueName: \"kubernetes.io/projected/61134bd8-7840-4ba7-8ec8-02e41ed425cb-kube-api-access-5zfzv\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.618667 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7c810fdc-b085-41e9-9f84-d09b8b28d809-auth-proxy-config\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.618704 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxw22\" (UniqueName: \"kubernetes.io/projected/9230af7f-443e-452e-b3ba-8bd78a0f8211-kube-api-access-zxw22\") pod \"package-server-manager-789f6589d5-mshdw\" (UID: \"9230af7f-443e-452e-b3ba-8bd78a0f8211\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.618976 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68d7z\" (UniqueName: \"kubernetes.io/projected/e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e-kube-api-access-68d7z\") pod \"openshift-config-operator-7777fb866f-bjhj7\" (UID: \"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619112 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdjlw\" (UniqueName: \"kubernetes.io/projected/7c810fdc-b085-41e9-9f84-d09b8b28d809-kube-api-access-bdjlw\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619148 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-serving-cert\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619196 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/505e68d1-6316-4847-a116-79e58bbc711d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-nbksm\" (UID: \"505e68d1-6316-4847-a116-79e58bbc711d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619256 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1714e18-7b9d-435d-9602-caf20c5a43da-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jtgzb\" (UID: \"a1714e18-7b9d-435d-9602-caf20c5a43da\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619393 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-trusted-ca-bundle\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619472 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk9kb\" (UniqueName: \"kubernetes.io/projected/17e4e5df-7027-456d-be2d-b412f4a379ea-kube-api-access-pk9kb\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619529 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7c810fdc-b085-41e9-9f84-d09b8b28d809-machine-approver-tls\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619558 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-oauth-config\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619588 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdvm8\" (UniqueName: \"kubernetes.io/projected/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-kube-api-access-cdvm8\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619659 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a70c7642-31e1-473d-9ebc-22af0ca255ec-config\") pod \"kube-controller-manager-operator-78b949d7b-lzrfw\" (UID: \"a70c7642-31e1-473d-9ebc-22af0ca255ec\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619719 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/5c620bea-8aad-4e50-8088-68e259884c27-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619743 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-config\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619796 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hfht\" (UniqueName: \"kubernetes.io/projected/5c620bea-8aad-4e50-8088-68e259884c27-kube-api-access-5hfht\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619832 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5c620bea-8aad-4e50-8088-68e259884c27-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619860 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bjhj7\" (UID: \"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.619924 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-etcd-serving-ca\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620005 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e8af4e41-2e1b-4164-abc3-587460928aab-audit-dir\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620077 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-client-ca\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620169 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-oauth-serving-cert\") pod 
\"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620209 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a70c7642-31e1-473d-9ebc-22af0ca255ec-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lzrfw\" (UID: \"a70c7642-31e1-473d-9ebc-22af0ca255ec\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620249 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/505e68d1-6316-4847-a116-79e58bbc711d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-nbksm\" (UID: \"505e68d1-6316-4847-a116-79e58bbc711d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620307 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c810fdc-b085-41e9-9f84-d09b8b28d809-config\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620336 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17e4e5df-7027-456d-be2d-b412f4a379ea-service-ca-bundle\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620390 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-config\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620415 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e4e5df-7027-456d-be2d-b412f4a379ea-config\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620444 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk655\" (UniqueName: \"kubernetes.io/projected/a1714e18-7b9d-435d-9602-caf20c5a43da-kube-api-access-pk655\") pod \"openshift-apiserver-operator-796bbdcf4f-jtgzb\" (UID: \"a1714e18-7b9d-435d-9602-caf20c5a43da\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620468 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e8af4e41-2e1b-4164-abc3-587460928aab-etcd-client\") pod \"apiserver-76f77b778f-g5c8p\" (UID: 
\"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620489 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9230af7f-443e-452e-b3ba-8bd78a0f8211-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mshdw\" (UID: \"9230af7f-443e-452e-b3ba-8bd78a0f8211\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620551 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61134bd8-7840-4ba7-8ec8-02e41ed425cb-serving-cert\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620596 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e8af4e41-2e1b-4164-abc3-587460928aab-encryption-config\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620625 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1714e18-7b9d-435d-9602-caf20c5a43da-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jtgzb\" (UID: \"a1714e18-7b9d-435d-9602-caf20c5a43da\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620649 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-image-import-ca\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620673 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5442h\" (UniqueName: \"kubernetes.io/projected/505e68d1-6316-4847-a116-79e58bbc711d-kube-api-access-5442h\") pod \"kube-storage-version-migrator-operator-b67b599dd-nbksm\" (UID: \"505e68d1-6316-4847-a116-79e58bbc711d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620694 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5c620bea-8aad-4e50-8088-68e259884c27-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620719 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e8af4e41-2e1b-4164-abc3-587460928aab-node-pullsecrets\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " 
pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620734 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a70c7642-31e1-473d-9ebc-22af0ca255ec-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lzrfw\" (UID: \"a70c7642-31e1-473d-9ebc-22af0ca255ec\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620758 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8af4e41-2e1b-4164-abc3-587460928aab-serving-cert\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620815 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-config\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620843 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620862 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17e4e5df-7027-456d-be2d-b412f4a379ea-serving-cert\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620895 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-audit\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620920 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsqw2\" (UniqueName: \"kubernetes.io/projected/e8af4e41-2e1b-4164-abc3-587460928aab-kube-api-access-wsqw2\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620937 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-service-ca\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620956 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/e73a6475-be76-44c4-8fd9-eb5c6799e7fc-metrics-tls\") pod \"dns-operator-744455d44c-rstcd\" (UID: \"e73a6475-be76-44c4-8fd9-eb5c6799e7fc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.620978 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2frqg\" (UniqueName: \"kubernetes.io/projected/e73a6475-be76-44c4-8fd9-eb5c6799e7fc-kube-api-access-2frqg\") pod \"dns-operator-744455d44c-rstcd\" (UID: \"e73a6475-be76-44c4-8fd9-eb5c6799e7fc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.621014 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17e4e5df-7027-456d-be2d-b412f4a379ea-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.621035 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e-serving-cert\") pod \"openshift-config-operator-7777fb866f-bjhj7\" (UID: \"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.621159 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e8af4e41-2e1b-4164-abc3-587460928aab-audit-dir\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.621975 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-etcd-serving-ca\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.622662 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17e4e5df-7027-456d-be2d-b412f4a379ea-service-ca-bundle\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.623306 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e4e5df-7027-456d-be2d-b412f4a379ea-config\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.624438 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7c810fdc-b085-41e9-9f84-d09b8b28d809-auth-proxy-config\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.624459 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e8af4e41-2e1b-4164-abc3-587460928aab-node-pullsecrets\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.625388 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5c620bea-8aad-4e50-8088-68e259884c27-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.625393 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-audit\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.626330 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-config\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.627197 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17e4e5df-7027-456d-be2d-b412f4a379ea-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.627134 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1714e18-7b9d-435d-9602-caf20c5a43da-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jtgzb\" (UID: \"a1714e18-7b9d-435d-9602-caf20c5a43da\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.630826 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1714e18-7b9d-435d-9602-caf20c5a43da-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jtgzb\" (UID: \"a1714e18-7b9d-435d-9602-caf20c5a43da\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.631336 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-config\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.632033 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.632322 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c810fdc-b085-41e9-9f84-d09b8b28d809-config\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.632904 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7c810fdc-b085-41e9-9f84-d09b8b28d809-machine-approver-tls\") pod \"machine-approver-56656f9798-r2lwj\" (UID: \"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.632607 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17e4e5df-7027-456d-be2d-b412f4a379ea-serving-cert\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.633752 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5c620bea-8aad-4e50-8088-68e259884c27-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.634308 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.634424 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e8af4e41-2e1b-4164-abc3-587460928aab-image-import-ca\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.635766 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g5c8p"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.636479 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e8af4e41-2e1b-4164-abc3-587460928aab-encryption-config\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.638201 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.640335 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nphwc"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.641551 5014 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.641746 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.642824 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.642723 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7sfns"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.643699 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.647446 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.648710 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-bzvs8"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.648778 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.650708 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.653343 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8af4e41-2e1b-4164-abc3-587460928aab-serving-cert\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.653430 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vf2bj"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.654110 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61134bd8-7840-4ba7-8ec8-02e41ed425cb-serving-cert\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.655234 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-xjs5g"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.656414 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e8af4e41-2e1b-4164-abc3-587460928aab-etcd-client\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.655461 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.656871 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.657153 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.660358 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9gwng"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.661312 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.661336 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5wnsv"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.661460 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9gwng" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.667635 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-b2znf"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.669399 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.673202 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.674519 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.678237 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.679396 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rstcd"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.680833 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.681990 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.684875 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.686339 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.687432 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.688770 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.692121 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q4jjv"] Dec 05 
10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.694877 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.695234 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.696593 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7x7jf"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.697659 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.699262 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2mtlm"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.700257 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2s2hb"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.701877 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.703003 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.704269 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nphwc"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.705538 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9gwng"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.707135 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jjj6x"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.707823 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-2gdp8"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.708816 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.708849 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-778n8"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.709947 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.710744 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.714912 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.715170 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-k6tg9"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.720537 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724238 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5442h\" (UniqueName: \"kubernetes.io/projected/505e68d1-6316-4847-a116-79e58bbc711d-kube-api-access-5442h\") pod \"kube-storage-version-migrator-operator-b67b599dd-nbksm\" (UID: \"505e68d1-6316-4847-a116-79e58bbc711d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724326 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a70c7642-31e1-473d-9ebc-22af0ca255ec-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lzrfw\" (UID: \"a70c7642-31e1-473d-9ebc-22af0ca255ec\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724377 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e73a6475-be76-44c4-8fd9-eb5c6799e7fc-metrics-tls\") pod \"dns-operator-744455d44c-rstcd\" (UID: \"e73a6475-be76-44c4-8fd9-eb5c6799e7fc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724408 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2frqg\" (UniqueName: \"kubernetes.io/projected/e73a6475-be76-44c4-8fd9-eb5c6799e7fc-kube-api-access-2frqg\") pod \"dns-operator-744455d44c-rstcd\" (UID: \"e73a6475-be76-44c4-8fd9-eb5c6799e7fc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724452 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-service-ca\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724489 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e-serving-cert\") pod \"openshift-config-operator-7777fb866f-bjhj7\" (UID: \"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724524 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-68d7z\" (UniqueName: \"kubernetes.io/projected/e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e-kube-api-access-68d7z\") pod \"openshift-config-operator-7777fb866f-bjhj7\" (UID: \"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724557 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxw22\" (UniqueName: \"kubernetes.io/projected/9230af7f-443e-452e-b3ba-8bd78a0f8211-kube-api-access-zxw22\") pod \"package-server-manager-789f6589d5-mshdw\" (UID: \"9230af7f-443e-452e-b3ba-8bd78a0f8211\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724602 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-serving-cert\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724683 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/505e68d1-6316-4847-a116-79e58bbc711d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-nbksm\" (UID: \"505e68d1-6316-4847-a116-79e58bbc711d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724716 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-trusted-ca-bundle\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.724752 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-oauth-config\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.725093 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdvm8\" (UniqueName: \"kubernetes.io/projected/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-kube-api-access-cdvm8\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.725205 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a70c7642-31e1-473d-9ebc-22af0ca255ec-config\") pod \"kube-controller-manager-operator-78b949d7b-lzrfw\" (UID: \"a70c7642-31e1-473d-9ebc-22af0ca255ec\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.725251 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-config\") pod \"console-f9d7485db-bzvs8\" (UID: 
\"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.725351 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bjhj7\" (UID: \"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.725524 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-oauth-serving-cert\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.725597 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a70c7642-31e1-473d-9ebc-22af0ca255ec-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lzrfw\" (UID: \"a70c7642-31e1-473d-9ebc-22af0ca255ec\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.725819 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/505e68d1-6316-4847-a116-79e58bbc711d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-nbksm\" (UID: \"505e68d1-6316-4847-a116-79e58bbc711d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.725878 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9230af7f-443e-452e-b3ba-8bd78a0f8211-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mshdw\" (UID: \"9230af7f-443e-452e-b3ba-8bd78a0f8211\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.726655 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bjhj7\" (UID: \"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.727031 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7sfns"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.727126 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-service-ca\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.727650 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a70c7642-31e1-473d-9ebc-22af0ca255ec-config\") pod \"kube-controller-manager-operator-78b949d7b-lzrfw\" (UID: \"a70c7642-31e1-473d-9ebc-22af0ca255ec\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.727915 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-oauth-serving-cert\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.728110 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e73a6475-be76-44c4-8fd9-eb5c6799e7fc-metrics-tls\") pod \"dns-operator-744455d44c-rstcd\" (UID: \"e73a6475-be76-44c4-8fd9-eb5c6799e7fc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.728826 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-trusted-ca-bundle\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.729154 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-config\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.732351 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e-serving-cert\") pod \"openshift-config-operator-7777fb866f-bjhj7\" (UID: \"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.732787 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-serving-cert\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.733131 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-oauth-config\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.735385 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.737521 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a70c7642-31e1-473d-9ebc-22af0ca255ec-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lzrfw\" (UID: \"a70c7642-31e1-473d-9ebc-22af0ca255ec\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.741942 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.744141 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-2gdp8"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.745371 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-778n8"] Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.754914 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.788866 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.794612 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.815112 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.835653 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.855128 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.874620 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.895212 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.918315 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.944890 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.955193 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.974404 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 10:50:06 crc kubenswrapper[5014]: I1205 10:50:06.994709 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.015587 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.035046 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.055862 5014 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.075028 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.094196 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.116132 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.135803 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.154861 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.176077 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.195182 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.215680 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.236661 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.254810 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.277012 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.295657 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.316313 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.335802 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.355965 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.375694 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.395182 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.415113 5014 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.435017 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.475734 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.482463 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9230af7f-443e-452e-b3ba-8bd78a0f8211-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mshdw\" (UID: \"9230af7f-443e-452e-b3ba-8bd78a0f8211\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.496089 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.515243 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.520527 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/505e68d1-6316-4847-a116-79e58bbc711d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-nbksm\" (UID: \"505e68d1-6316-4847-a116-79e58bbc711d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.536483 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.575750 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.578447 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/505e68d1-6316-4847-a116-79e58bbc711d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-nbksm\" (UID: \"505e68d1-6316-4847-a116-79e58bbc711d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.593626 5014 request.go:700] Waited for 1.004385649s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-storage-version-migrator-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.595393 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.614832 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.635154 5014 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.655194 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.676112 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.694817 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.715562 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.747482 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.755740 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.776223 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.795072 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.816673 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.836939 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.856169 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.875145 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.896565 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.916515 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.965753 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zfzv\" (UniqueName: \"kubernetes.io/projected/61134bd8-7840-4ba7-8ec8-02e41ed425cb-kube-api-access-5zfzv\") pod \"route-controller-manager-6576b87f9c-xzr9m\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:07 crc kubenswrapper[5014]: I1205 10:50:07.986542 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdjlw\" (UniqueName: \"kubernetes.io/projected/7c810fdc-b085-41e9-9f84-d09b8b28d809-kube-api-access-bdjlw\") pod \"machine-approver-56656f9798-r2lwj\" (UID: 
\"7c810fdc-b085-41e9-9f84-d09b8b28d809\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.008975 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hfht\" (UniqueName: \"kubernetes.io/projected/5c620bea-8aad-4e50-8088-68e259884c27-kube-api-access-5hfht\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.018529 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.025129 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk9kb\" (UniqueName: \"kubernetes.io/projected/17e4e5df-7027-456d-be2d-b412f4a379ea-kube-api-access-pk9kb\") pod \"authentication-operator-69f744f599-djp4n\" (UID: \"17e4e5df-7027-456d-be2d-b412f4a379ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.036666 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.043424 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5c620bea-8aad-4e50-8088-68e259884c27-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-fsgvs\" (UID: \"5c620bea-8aad-4e50-8088-68e259884c27\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.053499 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.055745 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.075381 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.075503 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.118790 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsqw2\" (UniqueName: \"kubernetes.io/projected/e8af4e41-2e1b-4164-abc3-587460928aab-kube-api-access-wsqw2\") pod \"apiserver-76f77b778f-g5c8p\" (UID: \"e8af4e41-2e1b-4164-abc3-587460928aab\") " pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.130513 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk655\" (UniqueName: \"kubernetes.io/projected/a1714e18-7b9d-435d-9602-caf20c5a43da-kube-api-access-pk655\") pod \"openshift-apiserver-operator-796bbdcf4f-jtgzb\" (UID: \"a1714e18-7b9d-435d-9602-caf20c5a43da\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.136447 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.155836 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.177038 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.207297 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.233105 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.234562 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.255884 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.275148 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.279136 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.294931 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.298931 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.315737 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.331438 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.335674 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.355372 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.375341 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.395060 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.414620 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.435902 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.452230 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" event={"ID":"7c810fdc-b085-41e9-9f84-d09b8b28d809","Type":"ContainerStarted","Data":"48e2ce4fdeaf883799ae360e9a7ac9d8108fc9631a10784d4888f1a481906231"} Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.456322 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.475354 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.495338 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.514711 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.535959 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.555801 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.575461 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.596362 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.613400 5014 request.go:700] Waited for 1.903080264s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.616131 5014 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.635894 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.655534 5014 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.690976 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5442h\" (UniqueName: \"kubernetes.io/projected/505e68d1-6316-4847-a116-79e58bbc711d-kube-api-access-5442h\") pod \"kube-storage-version-migrator-operator-b67b599dd-nbksm\" (UID: \"505e68d1-6316-4847-a116-79e58bbc711d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.710473 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2frqg\" (UniqueName: \"kubernetes.io/projected/e73a6475-be76-44c4-8fd9-eb5c6799e7fc-kube-api-access-2frqg\") pod \"dns-operator-744455d44c-rstcd\" (UID: \"e73a6475-be76-44c4-8fd9-eb5c6799e7fc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.726670 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxw22\" (UniqueName: \"kubernetes.io/projected/9230af7f-443e-452e-b3ba-8bd78a0f8211-kube-api-access-zxw22\") pod \"package-server-manager-789f6589d5-mshdw\" (UID: \"9230af7f-443e-452e-b3ba-8bd78a0f8211\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.766034 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdvm8\" (UniqueName: \"kubernetes.io/projected/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-kube-api-access-cdvm8\") pod \"console-f9d7485db-bzvs8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.772868 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68d7z\" (UniqueName: \"kubernetes.io/projected/e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e-kube-api-access-68d7z\") pod \"openshift-config-operator-7777fb866f-bjhj7\" (UID: \"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.777299 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.801082 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.807444 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a70c7642-31e1-473d-9ebc-22af0ca255ec-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lzrfw\" (UID: \"a70c7642-31e1-473d-9ebc-22af0ca255ec\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.832762 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.848573 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m"] Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856470 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856510 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856528 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f13e37-538f-4ef3-9b4f-0be841ea1078-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-cfqtb\" (UID: \"b3f13e37-538f-4ef3-9b4f-0be841ea1078\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856552 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b70a66a-f89a-4578-adbc-50757fdb730f-config\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856569 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8b70a66a-f89a-4578-adbc-50757fdb730f-etcd-client\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856591 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ad71de77-0b33-48ff-86d1-87235f83b4bf-images\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856608 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0462b518-e848-4601-bcf6-d30a321e2191-srv-cert\") pod \"catalog-operator-68c6474976-sdrhf\" (UID: \"0462b518-e848-4601-bcf6-d30a321e2191\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856629 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/425046d9-b7c9-4b15-be69-2b2ab11aad8f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856648 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-default-certificate\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856667 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1a285682-cde2-4857-9e6f-e41577d083de-etcd-client\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856682 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a285682-cde2-4857-9e6f-e41577d083de-serving-cert\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856817 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kjk6\" (UniqueName: \"kubernetes.io/projected/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-kube-api-access-6kjk6\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856889 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/425046d9-b7c9-4b15-be69-2b2ab11aad8f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856948 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aec87cf-7349-4cd6-8364-333bf8614193-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-n9mw4\" (UID: \"0aec87cf-7349-4cd6-8364-333bf8614193\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:08 crc 
kubenswrapper[5014]: I1205 10:50:08.856974 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f13e37-538f-4ef3-9b4f-0be841ea1078-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-cfqtb\" (UID: \"b3f13e37-538f-4ef3-9b4f-0be841ea1078\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.856999 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvmss\" (UniqueName: \"kubernetes.io/projected/edf32fc4-b4ff-4b20-9a56-78331a268e28-kube-api-access-gvmss\") pod \"olm-operator-6b444d44fb-dkhwk\" (UID: \"edf32fc4-b4ff-4b20-9a56-78331a268e28\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857023 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1a285682-cde2-4857-9e6f-e41577d083de-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857096 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad71de77-0b33-48ff-86d1-87235f83b4bf-config\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857158 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857241 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857329 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a285682-cde2-4857-9e6f-e41577d083de-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857381 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857424 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-certificates\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857449 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d8b0502f-2954-4ebc-9920-28afad95dc00-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jjj6x\" (UID: \"d8b0502f-2954-4ebc-9920-28afad95dc00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857490 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b38c4eae-27cf-40fd-89ee-6513d241b130-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857515 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857545 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znthd\" (UniqueName: \"kubernetes.io/projected/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-kube-api-access-znthd\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857570 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqg5j\" (UniqueName: \"kubernetes.io/projected/bc735a51-3df7-4004-9fce-421450c9d084-kube-api-access-dqg5j\") pod \"cluster-samples-operator-665b6dd947-l5dft\" (UID: \"bc735a51-3df7-4004-9fce-421450c9d084\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857596 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857639 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-trusted-ca\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: 
\"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857665 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1a285682-cde2-4857-9e6f-e41577d083de-audit-dir\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857693 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-bound-sa-token\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: E1205 10:50:08.857714 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:09.357695934 +0000 UTC m=+136.305813828 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857760 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lm9k\" (UniqueName: \"kubernetes.io/projected/8b70a66a-f89a-4578-adbc-50757fdb730f-kube-api-access-5lm9k\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857828 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8b70a66a-f89a-4578-adbc-50757fdb730f-etcd-service-ca\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857862 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857902 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf5c2\" (UniqueName: \"kubernetes.io/projected/b38c4eae-27cf-40fd-89ee-6513d241b130-kube-api-access-gf5c2\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 
05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857957 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd7xr\" (UniqueName: \"kubernetes.io/projected/0462b518-e848-4601-bcf6-d30a321e2191-kube-api-access-dd7xr\") pod \"catalog-operator-68c6474976-sdrhf\" (UID: \"0462b518-e848-4601-bcf6-d30a321e2191\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.857978 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m467d\" (UniqueName: \"kubernetes.io/projected/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-kube-api-access-m467d\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858226 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-metrics-certs\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858333 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jgkv\" (UniqueName: \"kubernetes.io/projected/506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f-kube-api-access-7jgkv\") pod \"downloads-7954f5f757-vf2bj\" (UID: \"506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f\") " pod="openshift-console/downloads-7954f5f757-vf2bj" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858378 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/9357b561-29c1-4fb1-9004-8bf8378aad02-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-tsztt\" (UID: \"9357b561-29c1-4fb1-9004-8bf8378aad02\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858428 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0aec87cf-7349-4cd6-8364-333bf8614193-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-n9mw4\" (UID: \"0aec87cf-7349-4cd6-8364-333bf8614193\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858446 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-policies\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858477 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-stats-auth\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 
05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858516 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-tls\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858562 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8b70a66a-f89a-4578-adbc-50757fdb730f-etcd-ca\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858680 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65q4s\" (UniqueName: \"kubernetes.io/projected/ad71de77-0b33-48ff-86d1-87235f83b4bf-kube-api-access-65q4s\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858757 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0462b518-e848-4601-bcf6-d30a321e2191-profile-collector-cert\") pod \"catalog-operator-68c6474976-sdrhf\" (UID: \"0462b518-e848-4601-bcf6-d30a321e2191\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858790 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/bc735a51-3df7-4004-9fce-421450c9d084-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-l5dft\" (UID: \"bc735a51-3df7-4004-9fce-421450c9d084\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858814 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-dir\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858840 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858899 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6sjh\" (UniqueName: \"kubernetes.io/projected/74f4896d-2ffa-459a-a62d-9c29df96e5dc-kube-api-access-m6sjh\") pod \"migrator-59844c95c7-bvs6h\" (UID: \"74f4896d-2ffa-459a-a62d-9c29df96e5dc\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h" Dec 05 10:50:08 crc 
kubenswrapper[5014]: I1205 10:50:08.858917 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/edf32fc4-b4ff-4b20-9a56-78331a268e28-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dkhwk\" (UID: \"edf32fc4-b4ff-4b20-9a56-78331a268e28\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858941 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.858981 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8t86\" (UniqueName: \"kubernetes.io/projected/d8b0502f-2954-4ebc-9920-28afad95dc00-kube-api-access-z8t86\") pod \"multus-admission-controller-857f4d67dd-jjj6x\" (UID: \"d8b0502f-2954-4ebc-9920-28afad95dc00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859004 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859025 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b70a66a-f89a-4578-adbc-50757fdb730f-serving-cert\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859048 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/edf32fc4-b4ff-4b20-9a56-78331a268e28-srv-cert\") pod \"olm-operator-6b444d44fb-dkhwk\" (UID: \"edf32fc4-b4ff-4b20-9a56-78331a268e28\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859071 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj7dj\" (UniqueName: \"kubernetes.io/projected/1a285682-cde2-4857-9e6f-e41577d083de-kube-api-access-sj7dj\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859090 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859116 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b38c4eae-27cf-40fd-89ee-6513d241b130-metrics-tls\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859137 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkr6g\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-kube-api-access-mkr6g\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859156 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1a285682-cde2-4857-9e6f-e41577d083de-audit-policies\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859195 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b38c4eae-27cf-40fd-89ee-6513d241b130-trusted-ca\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859212 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad71de77-0b33-48ff-86d1-87235f83b4bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859243 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-service-ca-bundle\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859263 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aec87cf-7349-4cd6-8364-333bf8614193-config\") pod \"kube-apiserver-operator-766d6c64bb-n9mw4\" (UID: \"0aec87cf-7349-4cd6-8364-333bf8614193\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859308 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-config\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc 
kubenswrapper[5014]: I1205 10:50:08.859337 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-client-ca\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859353 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-serving-cert\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859386 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1a285682-cde2-4857-9e6f-e41577d083de-encryption-config\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859406 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbvdq\" (UniqueName: \"kubernetes.io/projected/b3f13e37-538f-4ef3-9b4f-0be841ea1078-kube-api-access-fbvdq\") pod \"openshift-controller-manager-operator-756b6f6bc6-cfqtb\" (UID: \"b3f13e37-538f-4ef3-9b4f-0be841ea1078\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859441 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds6vl\" (UniqueName: \"kubernetes.io/projected/9357b561-29c1-4fb1-9004-8bf8378aad02-kube-api-access-ds6vl\") pod \"control-plane-machine-set-operator-78cbb6b69f-tsztt\" (UID: \"9357b561-29c1-4fb1-9004-8bf8378aad02\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.859458 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.860903 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs"] Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.957763 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.960767 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:08 crc kubenswrapper[5014]: E1205 10:50:08.960972 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:09.460928225 +0000 UTC m=+136.409045929 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961070 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1a285682-cde2-4857-9e6f-e41577d083de-encryption-config\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961115 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df1aa6de-1ebf-4646-9a50-aa735e7ce529-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7wfct\" (UID: \"df1aa6de-1ebf-4646-9a50-aa735e7ce529\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961145 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/22a560b7-9ab6-434e-b5f6-fe8692561dd4-metrics-tls\") pod \"dns-default-2gdp8\" (UID: \"22a560b7-9ab6-434e-b5f6-fe8692561dd4\") " pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961170 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4h9b\" (UniqueName: \"kubernetes.io/projected/a0934bb9-807d-4fac-90db-4535e626a2b2-kube-api-access-g4h9b\") pod \"machine-config-server-xjs5g\" (UID: \"a0934bb9-807d-4fac-90db-4535e626a2b2\") " pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961208 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbvdq\" (UniqueName: \"kubernetes.io/projected/b3f13e37-538f-4ef3-9b4f-0be841ea1078-kube-api-access-fbvdq\") pod \"openshift-controller-manager-operator-756b6f6bc6-cfqtb\" (UID: \"b3f13e37-538f-4ef3-9b4f-0be841ea1078\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:08 crc 
kubenswrapper[5014]: I1205 10:50:08.961236 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/767efcaf-90a1-47ea-93a7-0583e15cbd3d-webhook-cert\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961297 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds6vl\" (UniqueName: \"kubernetes.io/projected/9357b561-29c1-4fb1-9004-8bf8378aad02-kube-api-access-ds6vl\") pod \"control-plane-machine-set-operator-78cbb6b69f-tsztt\" (UID: \"9357b561-29c1-4fb1-9004-8bf8378aad02\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961327 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961359 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961388 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961413 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f13e37-538f-4ef3-9b4f-0be841ea1078-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-cfqtb\" (UID: \"b3f13e37-538f-4ef3-9b4f-0be841ea1078\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961444 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsjb7\" (UniqueName: \"kubernetes.io/projected/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-kube-api-access-xsjb7\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961488 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7e23855f-2f97-407a-89ba-7af3b1e7e70a-signing-cabundle\") pod \"service-ca-9c57cc56f-k6tg9\" (UID: \"7e23855f-2f97-407a-89ba-7af3b1e7e70a\") " pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961514 
5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df1aa6de-1ebf-4646-9a50-aa735e7ce529-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7wfct\" (UID: \"df1aa6de-1ebf-4646-9a50-aa735e7ce529\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961551 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8hgt\" (UniqueName: \"kubernetes.io/projected/388f5e1e-b01b-4321-99ca-f97f3812f98f-kube-api-access-l8hgt\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961586 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b70a66a-f89a-4578-adbc-50757fdb730f-config\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961610 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8b70a66a-f89a-4578-adbc-50757fdb730f-etcd-client\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961645 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxk5p\" (UniqueName: \"kubernetes.io/projected/9aa2370d-27db-4547-95f3-f09274275737-kube-api-access-jxk5p\") pod \"collect-profiles-29415525-jdvh2\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961678 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ad71de77-0b33-48ff-86d1-87235f83b4bf-images\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961705 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0462b518-e848-4601-bcf6-d30a321e2191-srv-cert\") pod \"catalog-operator-68c6474976-sdrhf\" (UID: \"0462b518-e848-4601-bcf6-d30a321e2191\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961735 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-plugins-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961763 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/425046d9-b7c9-4b15-be69-2b2ab11aad8f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961788 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-default-certificate\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961812 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-registration-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961837 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/895d4707-26ef-44b2-aecc-d80f274b4b92-config\") pod \"service-ca-operator-777779d784-7sfns\" (UID: \"895d4707-26ef-44b2-aecc-d80f274b4b92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961867 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1a285682-cde2-4857-9e6f-e41577d083de-etcd-client\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961892 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a285682-cde2-4857-9e6f-e41577d083de-serving-cert\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961917 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c6353800-5d71-4732-a97d-3aeb02b4d648-trusted-ca\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961947 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kjk6\" (UniqueName: \"kubernetes.io/projected/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-kube-api-access-6kjk6\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.961971 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6353800-5d71-4732-a97d-3aeb02b4d648-config\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 
10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962000 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7e23855f-2f97-407a-89ba-7af3b1e7e70a-signing-key\") pod \"service-ca-9c57cc56f-k6tg9\" (UID: \"7e23855f-2f97-407a-89ba-7af3b1e7e70a\") " pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962045 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/425046d9-b7c9-4b15-be69-2b2ab11aad8f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962088 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klhnn\" (UniqueName: \"kubernetes.io/projected/767efcaf-90a1-47ea-93a7-0583e15cbd3d-kube-api-access-klhnn\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962112 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/15a55000-f900-4061-a9fc-6983f8ba066e-cert\") pod \"ingress-canary-9gwng\" (UID: \"15a55000-f900-4061-a9fc-6983f8ba066e\") " pod="openshift-ingress-canary/ingress-canary-9gwng" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962140 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aec87cf-7349-4cd6-8364-333bf8614193-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-n9mw4\" (UID: \"0aec87cf-7349-4cd6-8364-333bf8614193\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962165 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f13e37-538f-4ef3-9b4f-0be841ea1078-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-cfqtb\" (UID: \"b3f13e37-538f-4ef3-9b4f-0be841ea1078\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962194 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvmss\" (UniqueName: \"kubernetes.io/projected/edf32fc4-b4ff-4b20-9a56-78331a268e28-kube-api-access-gvmss\") pod \"olm-operator-6b444d44fb-dkhwk\" (UID: \"edf32fc4-b4ff-4b20-9a56-78331a268e28\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962219 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1a285682-cde2-4857-9e6f-e41577d083de-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962244 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ad71de77-0b33-48ff-86d1-87235f83b4bf-config\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962320 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962361 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw927\" (UniqueName: \"kubernetes.io/projected/22a560b7-9ab6-434e-b5f6-fe8692561dd4-kube-api-access-lw927\") pod \"dns-default-2gdp8\" (UID: \"22a560b7-9ab6-434e-b5f6-fe8692561dd4\") " pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962387 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/81e480c0-aa4c-485f-b69b-570d1edc1ef7-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4tl6v\" (UID: \"81e480c0-aa4c-485f-b69b-570d1edc1ef7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962427 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962457 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a285682-cde2-4857-9e6f-e41577d083de-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962508 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962535 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nphwc\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962565 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v8kt\" (UniqueName: 
\"kubernetes.io/projected/6d447c3b-5da9-443c-aeff-aa202692a222-kube-api-access-8v8kt\") pod \"marketplace-operator-79b997595-nphwc\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962594 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-certificates\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962622 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d8b0502f-2954-4ebc-9920-28afad95dc00-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jjj6x\" (UID: \"d8b0502f-2954-4ebc-9920-28afad95dc00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962651 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b38c4eae-27cf-40fd-89ee-6513d241b130-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962681 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962708 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znthd\" (UniqueName: \"kubernetes.io/projected/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-kube-api-access-znthd\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962736 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqg5j\" (UniqueName: \"kubernetes.io/projected/bc735a51-3df7-4004-9fce-421450c9d084-kube-api-access-dqg5j\") pod \"cluster-samples-operator-665b6dd947-l5dft\" (UID: \"bc735a51-3df7-4004-9fce-421450c9d084\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962766 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdhdh\" (UniqueName: \"kubernetes.io/projected/15a55000-f900-4061-a9fc-6983f8ba066e-kube-api-access-zdhdh\") pod \"ingress-canary-9gwng\" (UID: \"15a55000-f900-4061-a9fc-6983f8ba066e\") " pod="openshift-ingress-canary/ingress-canary-9gwng" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962794 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/81e480c0-aa4c-485f-b69b-570d1edc1ef7-proxy-tls\") pod 
\"machine-config-controller-84d6567774-4tl6v\" (UID: \"81e480c0-aa4c-485f-b69b-570d1edc1ef7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962820 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962844 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/388f5e1e-b01b-4321-99ca-f97f3812f98f-images\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962871 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkbhl\" (UniqueName: \"kubernetes.io/projected/81e480c0-aa4c-485f-b69b-570d1edc1ef7-kube-api-access-jkbhl\") pod \"machine-config-controller-84d6567774-4tl6v\" (UID: \"81e480c0-aa4c-485f-b69b-570d1edc1ef7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962921 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-mountpoint-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962953 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-trusted-ca\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.962979 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1a285682-cde2-4857-9e6f-e41577d083de-audit-dir\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963040 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6353800-5d71-4732-a97d-3aeb02b4d648-serving-cert\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963071 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-bound-sa-token\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963097 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aa2370d-27db-4547-95f3-f09274275737-config-volume\") pod \"collect-profiles-29415525-jdvh2\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963121 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aa2370d-27db-4547-95f3-f09274275737-secret-volume\") pod \"collect-profiles-29415525-jdvh2\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963146 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nphwc\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963190 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lm9k\" (UniqueName: \"kubernetes.io/projected/8b70a66a-f89a-4578-adbc-50757fdb730f-kube-api-access-5lm9k\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963216 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8b70a66a-f89a-4578-adbc-50757fdb730f-etcd-service-ca\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963236 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/767efcaf-90a1-47ea-93a7-0583e15cbd3d-tmpfs\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963261 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/895d4707-26ef-44b2-aecc-d80f274b4b92-serving-cert\") pod \"service-ca-operator-777779d784-7sfns\" (UID: \"895d4707-26ef-44b2-aecc-d80f274b4b92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963325 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc 
kubenswrapper[5014]: I1205 10:50:08.963349 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/767efcaf-90a1-47ea-93a7-0583e15cbd3d-apiservice-cert\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963413 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf5c2\" (UniqueName: \"kubernetes.io/projected/b38c4eae-27cf-40fd-89ee-6513d241b130-kube-api-access-gf5c2\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963439 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m467d\" (UniqueName: \"kubernetes.io/projected/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-kube-api-access-m467d\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963463 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd7xr\" (UniqueName: \"kubernetes.io/projected/0462b518-e848-4601-bcf6-d30a321e2191-kube-api-access-dd7xr\") pod \"catalog-operator-68c6474976-sdrhf\" (UID: \"0462b518-e848-4601-bcf6-d30a321e2191\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963522 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-metrics-certs\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963564 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jgkv\" (UniqueName: \"kubernetes.io/projected/506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f-kube-api-access-7jgkv\") pod \"downloads-7954f5f757-vf2bj\" (UID: \"506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f\") " pod="openshift-console/downloads-7954f5f757-vf2bj" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963589 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a0934bb9-807d-4fac-90db-4535e626a2b2-node-bootstrap-token\") pod \"machine-config-server-xjs5g\" (UID: \"a0934bb9-807d-4fac-90db-4535e626a2b2\") " pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963690 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0aec87cf-7349-4cd6-8364-333bf8614193-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-n9mw4\" (UID: \"0aec87cf-7349-4cd6-8364-333bf8614193\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.963727 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/9357b561-29c1-4fb1-9004-8bf8378aad02-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-tsztt\" (UID: \"9357b561-29c1-4fb1-9004-8bf8378aad02\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964069 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-policies\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964184 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-stats-auth\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964443 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tmcn\" (UniqueName: \"kubernetes.io/projected/c6353800-5d71-4732-a97d-3aeb02b4d648-kube-api-access-9tmcn\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964470 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-tls\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964495 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8b70a66a-f89a-4578-adbc-50757fdb730f-etcd-ca\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964538 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65q4s\" (UniqueName: \"kubernetes.io/projected/ad71de77-0b33-48ff-86d1-87235f83b4bf-kube-api-access-65q4s\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964562 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0462b518-e848-4601-bcf6-d30a321e2191-profile-collector-cert\") pod \"catalog-operator-68c6474976-sdrhf\" (UID: \"0462b518-e848-4601-bcf6-d30a321e2191\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964596 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/bc735a51-3df7-4004-9fce-421450c9d084-samples-operator-tls\") pod 
\"cluster-samples-operator-665b6dd947-l5dft\" (UID: \"bc735a51-3df7-4004-9fce-421450c9d084\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964620 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-dir\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964642 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964672 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6sjh\" (UniqueName: \"kubernetes.io/projected/74f4896d-2ffa-459a-a62d-9c29df96e5dc-kube-api-access-m6sjh\") pod \"migrator-59844c95c7-bvs6h\" (UID: \"74f4896d-2ffa-459a-a62d-9c29df96e5dc\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964696 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/edf32fc4-b4ff-4b20-9a56-78331a268e28-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dkhwk\" (UID: \"edf32fc4-b4ff-4b20-9a56-78331a268e28\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964718 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964741 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/388f5e1e-b01b-4321-99ca-f97f3812f98f-proxy-tls\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964765 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnk28\" (UniqueName: \"kubernetes.io/projected/895d4707-26ef-44b2-aecc-d80f274b4b92-kube-api-access-xnk28\") pod \"service-ca-operator-777779d784-7sfns\" (UID: \"895d4707-26ef-44b2-aecc-d80f274b4b92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964791 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt7nt\" (UniqueName: \"kubernetes.io/projected/7e23855f-2f97-407a-89ba-7af3b1e7e70a-kube-api-access-kt7nt\") pod 
\"service-ca-9c57cc56f-k6tg9\" (UID: \"7e23855f-2f97-407a-89ba-7af3b1e7e70a\") " pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964814 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8t86\" (UniqueName: \"kubernetes.io/projected/d8b0502f-2954-4ebc-9920-28afad95dc00-kube-api-access-z8t86\") pod \"multus-admission-controller-857f4d67dd-jjj6x\" (UID: \"d8b0502f-2954-4ebc-9920-28afad95dc00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964836 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a0934bb9-807d-4fac-90db-4535e626a2b2-certs\") pod \"machine-config-server-xjs5g\" (UID: \"a0934bb9-807d-4fac-90db-4535e626a2b2\") " pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964868 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964893 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b70a66a-f89a-4578-adbc-50757fdb730f-serving-cert\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964918 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/edf32fc4-b4ff-4b20-9a56-78331a268e28-srv-cert\") pod \"olm-operator-6b444d44fb-dkhwk\" (UID: \"edf32fc4-b4ff-4b20-9a56-78331a268e28\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964941 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj7dj\" (UniqueName: \"kubernetes.io/projected/1a285682-cde2-4857-9e6f-e41577d083de-kube-api-access-sj7dj\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.964967 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22a560b7-9ab6-434e-b5f6-fe8692561dd4-config-volume\") pod \"dns-default-2gdp8\" (UID: \"22a560b7-9ab6-434e-b5f6-fe8692561dd4\") " pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965008 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965033 
5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b38c4eae-27cf-40fd-89ee-6513d241b130-metrics-tls\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965058 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkr6g\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-kube-api-access-mkr6g\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965081 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b38c4eae-27cf-40fd-89ee-6513d241b130-trusted-ca\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965104 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad71de77-0b33-48ff-86d1-87235f83b4bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965128 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1a285682-cde2-4857-9e6f-e41577d083de-audit-policies\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965151 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-csi-data-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965225 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-service-ca-bundle\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965253 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/df1aa6de-1ebf-4646-9a50-aa735e7ce529-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7wfct\" (UID: \"df1aa6de-1ebf-4646-9a50-aa735e7ce529\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965304 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aec87cf-7349-4cd6-8364-333bf8614193-config\") pod 
\"kube-apiserver-operator-766d6c64bb-n9mw4\" (UID: \"0aec87cf-7349-4cd6-8364-333bf8614193\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965352 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-config\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965380 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-client-ca\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965406 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-serving-cert\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965433 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-socket-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.965494 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/388f5e1e-b01b-4321-99ca-f97f3812f98f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.967324 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-trusted-ca\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.968399 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b70a66a-f89a-4578-adbc-50757fdb730f-config\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.968891 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-dir\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.968970 5014 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1a285682-cde2-4857-9e6f-e41577d083de-audit-dir\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.969138 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-policies\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.969229 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.969904 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8b70a66a-f89a-4578-adbc-50757fdb730f-etcd-service-ca\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.970482 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aec87cf-7349-4cd6-8364-333bf8614193-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-n9mw4\" (UID: \"0aec87cf-7349-4cd6-8364-333bf8614193\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.970911 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.972755 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad71de77-0b33-48ff-86d1-87235f83b4bf-config\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.973370 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ad71de77-0b33-48ff-86d1-87235f83b4bf-images\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.973418 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-certificates\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.975879 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/1a285682-cde2-4857-9e6f-e41577d083de-serving-cert\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.976439 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/425046d9-b7c9-4b15-be69-2b2ab11aad8f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.977156 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d8b0502f-2954-4ebc-9920-28afad95dc00-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jjj6x\" (UID: \"d8b0502f-2954-4ebc-9920-28afad95dc00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.977630 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.977721 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1a285682-cde2-4857-9e6f-e41577d083de-encryption-config\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.977736 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-metrics-certs\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: E1205 10:50:08.978156 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:09.478137193 +0000 UTC m=+136.426254887 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.978325 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.979773 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a285682-cde2-4857-9e6f-e41577d083de-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.981783 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f13e37-538f-4ef3-9b4f-0be841ea1078-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-cfqtb\" (UID: \"b3f13e37-538f-4ef3-9b4f-0be841ea1078\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.982247 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1a285682-cde2-4857-9e6f-e41577d083de-etcd-client\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.982320 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.983361 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0462b518-e848-4601-bcf6-d30a321e2191-profile-collector-cert\") pod \"catalog-operator-68c6474976-sdrhf\" (UID: \"0462b518-e848-4601-bcf6-d30a321e2191\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.984804 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/425046d9-b7c9-4b15-be69-2b2ab11aad8f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.986428 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/8b70a66a-f89a-4578-adbc-50757fdb730f-etcd-ca\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.987472 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f13e37-538f-4ef3-9b4f-0be841ea1078-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-cfqtb\" (UID: \"b3f13e37-538f-4ef3-9b4f-0be841ea1078\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.989021 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.989096 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.990936 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.991241 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-config\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.992476 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-service-ca-bundle\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.992539 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aec87cf-7349-4cd6-8364-333bf8614193-config\") pod \"kube-apiserver-operator-766d6c64bb-n9mw4\" (UID: \"0aec87cf-7349-4cd6-8364-333bf8614193\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.992597 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-client-ca\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.993859 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1a285682-cde2-4857-9e6f-e41577d083de-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.994418 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.996182 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/9357b561-29c1-4fb1-9004-8bf8378aad02-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-tsztt\" (UID: \"9357b561-29c1-4fb1-9004-8bf8378aad02\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.996329 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b38c4eae-27cf-40fd-89ee-6513d241b130-trusted-ca\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.996715 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8b70a66a-f89a-4578-adbc-50757fdb730f-etcd-client\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.996758 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1a285682-cde2-4857-9e6f-e41577d083de-audit-policies\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.997670 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:08 crc kubenswrapper[5014]: I1205 10:50:08.998522 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0462b518-e848-4601-bcf6-d30a321e2191-srv-cert\") pod \"catalog-operator-68c6474976-sdrhf\" (UID: \"0462b518-e848-4601-bcf6-d30a321e2191\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.000134 5014 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.000744 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-serving-cert\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.005573 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/edf32fc4-b4ff-4b20-9a56-78331a268e28-srv-cert\") pod \"olm-operator-6b444d44fb-dkhwk\" (UID: \"edf32fc4-b4ff-4b20-9a56-78331a268e28\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.005631 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-default-certificate\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.006864 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/bc735a51-3df7-4004-9fce-421450c9d084-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-l5dft\" (UID: \"bc735a51-3df7-4004-9fce-421450c9d084\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.007928 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-tls\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.008616 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.008980 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b70a66a-f89a-4578-adbc-50757fdb730f-serving-cert\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.009170 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.009255 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/edf32fc4-b4ff-4b20-9a56-78331a268e28-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dkhwk\" (UID: \"edf32fc4-b4ff-4b20-9a56-78331a268e28\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.011116 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b38c4eae-27cf-40fd-89ee-6513d241b130-metrics-tls\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.014263 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.017053 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-bound-sa-token\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.017237 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad71de77-0b33-48ff-86d1-87235f83b4bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.019135 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-stats-auth\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.041375 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lm9k\" (UniqueName: \"kubernetes.io/projected/8b70a66a-f89a-4578-adbc-50757fdb730f-kube-api-access-5lm9k\") pod \"etcd-operator-b45778765-q4jjv\" (UID: \"8b70a66a-f89a-4578-adbc-50757fdb730f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.065924 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rstcd"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066416 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066696 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/767efcaf-90a1-47ea-93a7-0583e15cbd3d-apiservice-cert\") 
pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066757 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a0934bb9-807d-4fac-90db-4535e626a2b2-node-bootstrap-token\") pod \"machine-config-server-xjs5g\" (UID: \"a0934bb9-807d-4fac-90db-4535e626a2b2\") " pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066785 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tmcn\" (UniqueName: \"kubernetes.io/projected/c6353800-5d71-4732-a97d-3aeb02b4d648-kube-api-access-9tmcn\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066828 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/388f5e1e-b01b-4321-99ca-f97f3812f98f-proxy-tls\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066846 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnk28\" (UniqueName: \"kubernetes.io/projected/895d4707-26ef-44b2-aecc-d80f274b4b92-kube-api-access-xnk28\") pod \"service-ca-operator-777779d784-7sfns\" (UID: \"895d4707-26ef-44b2-aecc-d80f274b4b92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066861 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt7nt\" (UniqueName: \"kubernetes.io/projected/7e23855f-2f97-407a-89ba-7af3b1e7e70a-kube-api-access-kt7nt\") pod \"service-ca-9c57cc56f-k6tg9\" (UID: \"7e23855f-2f97-407a-89ba-7af3b1e7e70a\") " pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066877 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a0934bb9-807d-4fac-90db-4535e626a2b2-certs\") pod \"machine-config-server-xjs5g\" (UID: \"a0934bb9-807d-4fac-90db-4535e626a2b2\") " pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066905 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22a560b7-9ab6-434e-b5f6-fe8692561dd4-config-volume\") pod \"dns-default-2gdp8\" (UID: \"22a560b7-9ab6-434e-b5f6-fe8692561dd4\") " pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066931 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-csi-data-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066957 5014 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/df1aa6de-1ebf-4646-9a50-aa735e7ce529-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7wfct\" (UID: \"df1aa6de-1ebf-4646-9a50-aa735e7ce529\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066976 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-socket-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.066994 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/388f5e1e-b01b-4321-99ca-f97f3812f98f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067013 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/22a560b7-9ab6-434e-b5f6-fe8692561dd4-metrics-tls\") pod \"dns-default-2gdp8\" (UID: \"22a560b7-9ab6-434e-b5f6-fe8692561dd4\") " pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067028 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4h9b\" (UniqueName: \"kubernetes.io/projected/a0934bb9-807d-4fac-90db-4535e626a2b2-kube-api-access-g4h9b\") pod \"machine-config-server-xjs5g\" (UID: \"a0934bb9-807d-4fac-90db-4535e626a2b2\") " pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067086 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df1aa6de-1ebf-4646-9a50-aa735e7ce529-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7wfct\" (UID: \"df1aa6de-1ebf-4646-9a50-aa735e7ce529\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067102 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/767efcaf-90a1-47ea-93a7-0583e15cbd3d-webhook-cert\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067127 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsjb7\" (UniqueName: \"kubernetes.io/projected/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-kube-api-access-xsjb7\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067143 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7e23855f-2f97-407a-89ba-7af3b1e7e70a-signing-cabundle\") pod \"service-ca-9c57cc56f-k6tg9\" (UID: 
\"7e23855f-2f97-407a-89ba-7af3b1e7e70a\") " pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067160 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df1aa6de-1ebf-4646-9a50-aa735e7ce529-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7wfct\" (UID: \"df1aa6de-1ebf-4646-9a50-aa735e7ce529\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067176 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8hgt\" (UniqueName: \"kubernetes.io/projected/388f5e1e-b01b-4321-99ca-f97f3812f98f-kube-api-access-l8hgt\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067195 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxk5p\" (UniqueName: \"kubernetes.io/projected/9aa2370d-27db-4547-95f3-f09274275737-kube-api-access-jxk5p\") pod \"collect-profiles-29415525-jdvh2\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067212 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-plugins-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067228 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-registration-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067265 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/895d4707-26ef-44b2-aecc-d80f274b4b92-config\") pod \"service-ca-operator-777779d784-7sfns\" (UID: \"895d4707-26ef-44b2-aecc-d80f274b4b92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067303 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c6353800-5d71-4732-a97d-3aeb02b4d648-trusted-ca\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067329 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6353800-5d71-4732-a97d-3aeb02b4d648-config\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067346 5014 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7e23855f-2f97-407a-89ba-7af3b1e7e70a-signing-key\") pod \"service-ca-9c57cc56f-k6tg9\" (UID: \"7e23855f-2f97-407a-89ba-7af3b1e7e70a\") " pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067377 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klhnn\" (UniqueName: \"kubernetes.io/projected/767efcaf-90a1-47ea-93a7-0583e15cbd3d-kube-api-access-klhnn\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067395 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/15a55000-f900-4061-a9fc-6983f8ba066e-cert\") pod \"ingress-canary-9gwng\" (UID: \"15a55000-f900-4061-a9fc-6983f8ba066e\") " pod="openshift-ingress-canary/ingress-canary-9gwng" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067420 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw927\" (UniqueName: \"kubernetes.io/projected/22a560b7-9ab6-434e-b5f6-fe8692561dd4-kube-api-access-lw927\") pod \"dns-default-2gdp8\" (UID: \"22a560b7-9ab6-434e-b5f6-fe8692561dd4\") " pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067439 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/81e480c0-aa4c-485f-b69b-570d1edc1ef7-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4tl6v\" (UID: \"81e480c0-aa4c-485f-b69b-570d1edc1ef7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067465 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nphwc\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067484 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v8kt\" (UniqueName: \"kubernetes.io/projected/6d447c3b-5da9-443c-aeff-aa202692a222-kube-api-access-8v8kt\") pod \"marketplace-operator-79b997595-nphwc\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067520 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdhdh\" (UniqueName: \"kubernetes.io/projected/15a55000-f900-4061-a9fc-6983f8ba066e-kube-api-access-zdhdh\") pod \"ingress-canary-9gwng\" (UID: \"15a55000-f900-4061-a9fc-6983f8ba066e\") " pod="openshift-ingress-canary/ingress-canary-9gwng" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067537 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/81e480c0-aa4c-485f-b69b-570d1edc1ef7-proxy-tls\") pod \"machine-config-controller-84d6567774-4tl6v\" (UID: \"81e480c0-aa4c-485f-b69b-570d1edc1ef7\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.067586 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/388f5e1e-b01b-4321-99ca-f97f3812f98f-images\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.068910 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kjk6\" (UniqueName: \"kubernetes.io/projected/8088b3b8-62ae-4e37-8d98-8072fe5ac30b-kube-api-access-6kjk6\") pod \"router-default-5444994796-5qwb7\" (UID: \"8088b3b8-62ae-4e37-8d98-8072fe5ac30b\") " pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.071925 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/22a560b7-9ab6-434e-b5f6-fe8692561dd4-metrics-tls\") pod \"dns-default-2gdp8\" (UID: \"22a560b7-9ab6-434e-b5f6-fe8692561dd4\") " pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072234 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkbhl\" (UniqueName: \"kubernetes.io/projected/81e480c0-aa4c-485f-b69b-570d1edc1ef7-kube-api-access-jkbhl\") pod \"machine-config-controller-84d6567774-4tl6v\" (UID: \"81e480c0-aa4c-485f-b69b-570d1edc1ef7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072262 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-mountpoint-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072315 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6353800-5d71-4732-a97d-3aeb02b4d648-serving-cert\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072341 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aa2370d-27db-4547-95f3-f09274275737-config-volume\") pod \"collect-profiles-29415525-jdvh2\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072359 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aa2370d-27db-4547-95f3-f09274275737-secret-volume\") pod \"collect-profiles-29415525-jdvh2\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072381 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nphwc\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072414 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/767efcaf-90a1-47ea-93a7-0583e15cbd3d-tmpfs\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072775 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6353800-5d71-4732-a97d-3aeb02b4d648-config\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072825 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-csi-data-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.072473 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/895d4707-26ef-44b2-aecc-d80f274b4b92-serving-cert\") pod \"service-ca-operator-777779d784-7sfns\" (UID: \"895d4707-26ef-44b2-aecc-d80f274b4b92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.073374 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/895d4707-26ef-44b2-aecc-d80f274b4b92-config\") pod \"service-ca-operator-777779d784-7sfns\" (UID: \"895d4707-26ef-44b2-aecc-d80f274b4b92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.095657 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nphwc\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.096550 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c6353800-5d71-4732-a97d-3aeb02b4d648-trusted-ca\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.096614 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/388f5e1e-b01b-4321-99ca-f97f3812f98f-proxy-tls\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.097660 5014 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/81e480c0-aa4c-485f-b69b-570d1edc1ef7-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4tl6v\" (UID: \"81e480c0-aa4c-485f-b69b-570d1edc1ef7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.099103 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:09.599070045 +0000 UTC m=+136.547187749 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.099944 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/388f5e1e-b01b-4321-99ca-f97f3812f98f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.100645 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22a560b7-9ab6-434e-b5f6-fe8692561dd4-config-volume\") pod \"dns-default-2gdp8\" (UID: \"22a560b7-9ab6-434e-b5f6-fe8692561dd4\") " pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.102381 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbvdq\" (UniqueName: \"kubernetes.io/projected/b3f13e37-538f-4ef3-9b4f-0be841ea1078-kube-api-access-fbvdq\") pod \"openshift-controller-manager-operator-756b6f6bc6-cfqtb\" (UID: \"b3f13e37-538f-4ef3-9b4f-0be841ea1078\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.102776 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-registration-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.105610 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b38c4eae-27cf-40fd-89ee-6513d241b130-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.105670 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-plugins-dir\") pod 
\"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.106525 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7e23855f-2f97-407a-89ba-7af3b1e7e70a-signing-cabundle\") pod \"service-ca-9c57cc56f-k6tg9\" (UID: \"7e23855f-2f97-407a-89ba-7af3b1e7e70a\") " pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.107421 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-socket-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.107636 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/81e480c0-aa4c-485f-b69b-570d1edc1ef7-proxy-tls\") pod \"machine-config-controller-84d6567774-4tl6v\" (UID: \"81e480c0-aa4c-485f-b69b-570d1edc1ef7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.108114 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df1aa6de-1ebf-4646-9a50-aa735e7ce529-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7wfct\" (UID: \"df1aa6de-1ebf-4646-9a50-aa735e7ce529\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.108766 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a0934bb9-807d-4fac-90db-4535e626a2b2-certs\") pod \"machine-config-server-xjs5g\" (UID: \"a0934bb9-807d-4fac-90db-4535e626a2b2\") " pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.109544 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/767efcaf-90a1-47ea-93a7-0583e15cbd3d-tmpfs\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.114537 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/388f5e1e-b01b-4321-99ca-f97f3812f98f-images\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.117072 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-mountpoint-dir\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.119845 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/7e23855f-2f97-407a-89ba-7af3b1e7e70a-signing-key\") pod \"service-ca-9c57cc56f-k6tg9\" (UID: \"7e23855f-2f97-407a-89ba-7af3b1e7e70a\") " pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.120490 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aa2370d-27db-4547-95f3-f09274275737-secret-volume\") pod \"collect-profiles-29415525-jdvh2\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.121388 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-djp4n"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.122229 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/767efcaf-90a1-47ea-93a7-0583e15cbd3d-apiservice-cert\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.122525 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/767efcaf-90a1-47ea-93a7-0583e15cbd3d-webhook-cert\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.125103 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nphwc\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.126496 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jgkv\" (UniqueName: \"kubernetes.io/projected/506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f-kube-api-access-7jgkv\") pod \"downloads-7954f5f757-vf2bj\" (UID: \"506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f\") " pod="openshift-console/downloads-7954f5f757-vf2bj" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.126567 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/895d4707-26ef-44b2-aecc-d80f274b4b92-serving-cert\") pod \"service-ca-operator-777779d784-7sfns\" (UID: \"895d4707-26ef-44b2-aecc-d80f274b4b92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.127523 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aa2370d-27db-4547-95f3-f09274275737-config-volume\") pod \"collect-profiles-29415525-jdvh2\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.131159 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6353800-5d71-4732-a97d-3aeb02b4d648-serving-cert\") pod 
\"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.132410 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/15a55000-f900-4061-a9fc-6983f8ba066e-cert\") pod \"ingress-canary-9gwng\" (UID: \"15a55000-f900-4061-a9fc-6983f8ba066e\") " pod="openshift-ingress-canary/ingress-canary-9gwng" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.136836 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65q4s\" (UniqueName: \"kubernetes.io/projected/ad71de77-0b33-48ff-86d1-87235f83b4bf-kube-api-access-65q4s\") pod \"machine-api-operator-5694c8668f-b2znf\" (UID: \"ad71de77-0b33-48ff-86d1-87235f83b4bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.139046 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df1aa6de-1ebf-4646-9a50-aa735e7ce529-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7wfct\" (UID: \"df1aa6de-1ebf-4646-9a50-aa735e7ce529\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.139910 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g5c8p"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.149809 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-bzvs8"] Dec 05 10:50:09 crc kubenswrapper[5014]: W1205 10:50:09.153615 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17e4e5df_7027_456d_be2d_b412f4a379ea.slice/crio-78e3b7d4e1a44c33f6e72cb8eeb806d4fd479b9cd677954f82f3ddd0781fed22 WatchSource:0}: Error finding container 78e3b7d4e1a44c33f6e72cb8eeb806d4fd479b9cd677954f82f3ddd0781fed22: Status 404 returned error can't find the container with id 78e3b7d4e1a44c33f6e72cb8eeb806d4fd479b9cd677954f82f3ddd0781fed22 Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.165978 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a0934bb9-807d-4fac-90db-4535e626a2b2-node-bootstrap-token\") pod \"machine-config-server-xjs5g\" (UID: \"a0934bb9-807d-4fac-90db-4535e626a2b2\") " pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.172636 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6sjh\" (UniqueName: \"kubernetes.io/projected/74f4896d-2ffa-459a-a62d-9c29df96e5dc-kube-api-access-m6sjh\") pod \"migrator-59844c95c7-bvs6h\" (UID: \"74f4896d-2ffa-459a-a62d-9c29df96e5dc\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.174884 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 
10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.175448 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:09.675428321 +0000 UTC m=+136.623546025 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.177559 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8t86\" (UniqueName: \"kubernetes.io/projected/d8b0502f-2954-4ebc-9920-28afad95dc00-kube-api-access-z8t86\") pod \"multus-admission-controller-857f4d67dd-jjj6x\" (UID: \"d8b0502f-2954-4ebc-9920-28afad95dc00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.181981 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-vf2bj" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.186219 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.198252 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds6vl\" (UniqueName: \"kubernetes.io/projected/9357b561-29c1-4fb1-9004-8bf8378aad02-kube-api-access-ds6vl\") pod \"control-plane-machine-set-operator-78cbb6b69f-tsztt\" (UID: \"9357b561-29c1-4fb1-9004-8bf8378aad02\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.203088 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.215155 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0aec87cf-7349-4cd6-8364-333bf8614193-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-n9mw4\" (UID: \"0aec87cf-7349-4cd6-8364-333bf8614193\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.226501 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.235021 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqg5j\" (UniqueName: \"kubernetes.io/projected/bc735a51-3df7-4004-9fce-421450c9d084-kube-api-access-dqg5j\") pod \"cluster-samples-operator-665b6dd947-l5dft\" (UID: \"bc735a51-3df7-4004-9fce-421450c9d084\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.235182 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.246703 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.247204 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.253576 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvmss\" (UniqueName: \"kubernetes.io/projected/edf32fc4-b4ff-4b20-9a56-78331a268e28-kube-api-access-gvmss\") pod \"olm-operator-6b444d44fb-dkhwk\" (UID: \"edf32fc4-b4ff-4b20-9a56-78331a268e28\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.271152 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd7xr\" (UniqueName: \"kubernetes.io/projected/0462b518-e848-4601-bcf6-d30a321e2191-kube-api-access-dd7xr\") pod \"catalog-operator-68c6474976-sdrhf\" (UID: \"0462b518-e848-4601-bcf6-d30a321e2191\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:09 crc kubenswrapper[5014]: W1205 10:50:09.271731 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1dd3feb2_7ce4_46d3_9e9f_c329afde30e8.slice/crio-9ee9111047f3b099da6ec45342e569d733536b4dc3b09004a78f00dbea4c0620 WatchSource:0}: Error finding container 9ee9111047f3b099da6ec45342e569d733536b4dc3b09004a78f00dbea4c0620: Status 404 returned error can't find the container with id 9ee9111047f3b099da6ec45342e569d733536b4dc3b09004a78f00dbea4c0620 Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.275673 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.276467 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:09.776431348 +0000 UTC m=+136.724549052 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.292175 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.298521 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m467d\" (UniqueName: \"kubernetes.io/projected/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-kube-api-access-m467d\") pod \"oauth-openshift-558db77b4-7x7jf\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.313742 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf5c2\" (UniqueName: \"kubernetes.io/projected/b38c4eae-27cf-40fd-89ee-6513d241b130-kube-api-access-gf5c2\") pod \"ingress-operator-5b745b69d9-sm5bg\" (UID: \"b38c4eae-27cf-40fd-89ee-6513d241b130\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.338067 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.345768 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkr6g\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-kube-api-access-mkr6g\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.365588 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj7dj\" (UniqueName: \"kubernetes.io/projected/1a285682-cde2-4857-9e6f-e41577d083de-kube-api-access-sj7dj\") pod \"apiserver-7bbb656c7d-lf5z4\" (UID: \"1a285682-cde2-4857-9e6f-e41577d083de\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.376397 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znthd\" (UniqueName: \"kubernetes.io/projected/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-kube-api-access-znthd\") pod \"controller-manager-879f6c89f-5wnsv\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.378248 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.378803 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:09.878788894 +0000 UTC m=+136.826906588 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.397398 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.399328 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.406082 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.408185 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.414412 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.415976 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt7nt\" (UniqueName: \"kubernetes.io/projected/7e23855f-2f97-407a-89ba-7af3b1e7e70a-kube-api-access-kt7nt\") pod \"service-ca-9c57cc56f-k6tg9\" (UID: \"7e23855f-2f97-407a-89ba-7af3b1e7e70a\") " pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.435144 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxk5p\" (UniqueName: \"kubernetes.io/projected/9aa2370d-27db-4547-95f3-f09274275737-kube-api-access-jxk5p\") pod \"collect-profiles-29415525-jdvh2\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.452989 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.453803 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.453774 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klhnn\" (UniqueName: \"kubernetes.io/projected/767efcaf-90a1-47ea-93a7-0583e15cbd3d-kube-api-access-klhnn\") pod \"packageserver-d55dfcdfc-xjhrz\" (UID: \"767efcaf-90a1-47ea-93a7-0583e15cbd3d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.462756 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.468693 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" event={"ID":"9230af7f-443e-452e-b3ba-8bd78a0f8211","Type":"ContainerStarted","Data":"1bf420b512c7f38b2dab80cfed02d2c8b4924896a620e5f2c27be4109a5a574f"} Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.469227 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.471965 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bzvs8" event={"ID":"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8","Type":"ContainerStarted","Data":"9ee9111047f3b099da6ec45342e569d733536b4dc3b09004a78f00dbea4c0620"} Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.473960 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tmcn\" (UniqueName: \"kubernetes.io/projected/c6353800-5d71-4732-a97d-3aeb02b4d648-kube-api-access-9tmcn\") pod \"console-operator-58897d9998-2mtlm\" (UID: \"c6353800-5d71-4732-a97d-3aeb02b4d648\") " pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.475894 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" event={"ID":"5c620bea-8aad-4e50-8088-68e259884c27","Type":"ContainerStarted","Data":"f711ba13c073654bcf9ceb0407b0344b2196c91e7e4a5a384da1fd7400fefba6"} Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.475950 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" event={"ID":"5c620bea-8aad-4e50-8088-68e259884c27","Type":"ContainerStarted","Data":"d50d902c38d080f5a040556f951fc6c2a6a9d7b78212a6f11823d1553af17df2"} Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.479238 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.479756 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:09.979730599 +0000 UTC m=+136.927848303 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.480906 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" event={"ID":"61134bd8-7840-4ba7-8ec8-02e41ed425cb","Type":"ContainerStarted","Data":"76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06"} Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.480986 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" event={"ID":"61134bd8-7840-4ba7-8ec8-02e41ed425cb","Type":"ContainerStarted","Data":"e41701387477810a7a18b71773580232ace045f7b372d5c890845fa7263ccbf7"} Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.481203 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.482895 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" event={"ID":"e73a6475-be76-44c4-8fd9-eb5c6799e7fc","Type":"ContainerStarted","Data":"b7f72b29cf0314d7b07944a2831aa7356da546816a627babe53391f21a6db241"} Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.484376 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" event={"ID":"e8af4e41-2e1b-4164-abc3-587460928aab","Type":"ContainerStarted","Data":"799b0984a26a6f0d14cd1390c0a5b1c3267d94076e226fbc35c5a0754f30f8d6"} Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.485803 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" event={"ID":"17e4e5df-7027-456d-be2d-b412f4a379ea","Type":"ContainerStarted","Data":"78e3b7d4e1a44c33f6e72cb8eeb806d4fd479b9cd677954f82f3ddd0781fed22"} Dec 05 10:50:09 crc kubenswrapper[5014]: W1205 10:50:09.489846 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode53ac2ff_ffbc_4437_8c8d_a8e10387bc2e.slice/crio-0e65eea8e6ed370bd0e39f89beb6cbe09ba4c187eb8e7e0b9605572293ec1fd3 WatchSource:0}: Error finding container 0e65eea8e6ed370bd0e39f89beb6cbe09ba4c187eb8e7e0b9605572293ec1fd3: Status 404 returned error can't find the container with id 0e65eea8e6ed370bd0e39f89beb6cbe09ba4c187eb8e7e0b9605572293ec1fd3 Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.490956 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" event={"ID":"7c810fdc-b085-41e9-9f84-d09b8b28d809","Type":"ContainerStarted","Data":"dec1016a71aef834ffb17008a4b6781696e038eb27bfe072e0e752a12eb3a77a"} Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.497547 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnk28\" (UniqueName: \"kubernetes.io/projected/895d4707-26ef-44b2-aecc-d80f274b4b92-kube-api-access-xnk28\") pod 
\"service-ca-operator-777779d784-7sfns\" (UID: \"895d4707-26ef-44b2-aecc-d80f274b4b92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.509652 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdhdh\" (UniqueName: \"kubernetes.io/projected/15a55000-f900-4061-a9fc-6983f8ba066e-kube-api-access-zdhdh\") pod \"ingress-canary-9gwng\" (UID: \"15a55000-f900-4061-a9fc-6983f8ba066e\") " pod="openshift-ingress-canary/ingress-canary-9gwng" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.511183 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.543498 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v8kt\" (UniqueName: \"kubernetes.io/projected/6d447c3b-5da9-443c-aeff-aa202692a222-kube-api-access-8v8kt\") pod \"marketplace-operator-79b997595-nphwc\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.562664 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw927\" (UniqueName: \"kubernetes.io/projected/22a560b7-9ab6-434e-b5f6-fe8692561dd4-kube-api-access-lw927\") pod \"dns-default-2gdp8\" (UID: \"22a560b7-9ab6-434e-b5f6-fe8692561dd4\") " pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.570366 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/df1aa6de-1ebf-4646-9a50-aa735e7ce529-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7wfct\" (UID: \"df1aa6de-1ebf-4646-9a50-aa735e7ce529\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.579794 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.580835 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.582532 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:10.082514587 +0000 UTC m=+137.030632281 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.591061 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.592632 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4h9b\" (UniqueName: \"kubernetes.io/projected/a0934bb9-807d-4fac-90db-4535e626a2b2-kube-api-access-g4h9b\") pod \"machine-config-server-xjs5g\" (UID: \"a0934bb9-807d-4fac-90db-4535e626a2b2\") " pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.596180 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.595596 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q4jjv"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.612791 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.618773 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.622213 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.622381 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsjb7\" (UniqueName: \"kubernetes.io/projected/a921bd69-8bfd-4ab3-bfed-87fb64dbf061-kube-api-access-xsjb7\") pod \"csi-hostpathplugin-778n8\" (UID: \"a921bd69-8bfd-4ab3-bfed-87fb64dbf061\") " pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.633663 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.648024 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8hgt\" (UniqueName: \"kubernetes.io/projected/388f5e1e-b01b-4321-99ca-f97f3812f98f-kube-api-access-l8hgt\") pod \"machine-config-operator-74547568cd-j7d5t\" (UID: \"388f5e1e-b01b-4321-99ca-f97f3812f98f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.648107 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.653614 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-xjs5g" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.657370 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkbhl\" (UniqueName: \"kubernetes.io/projected/81e480c0-aa4c-485f-b69b-570d1edc1ef7-kube-api-access-jkbhl\") pod \"machine-config-controller-84d6567774-4tl6v\" (UID: \"81e480c0-aa4c-485f-b69b-570d1edc1ef7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.658894 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9gwng" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.667762 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.681616 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.682174 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:10.182147405 +0000 UTC m=+137.130265109 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.683405 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-778n8" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.690323 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.784080 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.784633 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:10.284615215 +0000 UTC m=+137.232732919 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.884852 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.886082 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:10.386057775 +0000 UTC m=+137.334175479 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.907514 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.913230 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.927856 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.928027 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-b2znf"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.929044 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.947364 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vf2bj"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.977371 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb"] Dec 05 10:50:09 crc kubenswrapper[5014]: I1205 10:50:09.987200 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:09 crc kubenswrapper[5014]: E1205 10:50:09.987652 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:10.487635079 +0000 UTC m=+137.435752783 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.090329 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.091186 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:10.591164799 +0000 UTC m=+137.539282503 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:10 crc kubenswrapper[5014]: W1205 10:50:10.135288 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad71de77_0b33_48ff_86d1_87235f83b4bf.slice/crio-56dca431d3d638b5779682aeaa328182e6a57eb6608ec5b890efa074c582278f WatchSource:0}: Error finding container 56dca431d3d638b5779682aeaa328182e6a57eb6608ec5b890efa074c582278f: Status 404 returned error can't find the container with id 56dca431d3d638b5779682aeaa328182e6a57eb6608ec5b890efa074c582278f Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.188308 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4"] Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.192592 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.192956 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:10.692942848 +0000 UTC m=+137.641060552 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.290978 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt"] Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.295446 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.295877 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:10.795858901 +0000 UTC m=+137.743976605 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.301232 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7x7jf"] Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.330935 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jjj6x"] Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.381668 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h"] Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.397790 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.398203 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:10.898188806 +0000 UTC m=+137.846306510 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.406606 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5wnsv"] Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.501384 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.502112 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.002091927 +0000 UTC m=+137.950209631 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.563654 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" event={"ID":"bc735a51-3df7-4004-9fce-421450c9d084","Type":"ContainerStarted","Data":"13a5b7d89532f7fbc994912e610d6498648a3b918aa7c4410b9921f52c640bd0"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.577503 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" event={"ID":"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e","Type":"ContainerStarted","Data":"0e65eea8e6ed370bd0e39f89beb6cbe09ba4c187eb8e7e0b9605572293ec1fd3"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.598636 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" event={"ID":"505e68d1-6316-4847-a116-79e58bbc711d","Type":"ContainerStarted","Data":"8ce34cc0983ad65ddca68d3d4937ee52a79cff28822e3388142a8877682f4384"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.602939 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.605877 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.105854083 +0000 UTC m=+138.053971787 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.664066 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf"]
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.676015 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" podStartSLOduration=120.675979369 podStartE2EDuration="2m0.675979369s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:10.67495662 +0000 UTC m=+137.623074334" watchObservedRunningTime="2025-12-05 10:50:10.675979369 +0000 UTC m=+137.624097073"
Dec 05 10:50:10 crc kubenswrapper[5014]: W1205 10:50:10.696862 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9357b561_29c1_4fb1_9004_8bf8378aad02.slice/crio-36d71c704a5c24efeae1da2486ed92b6b982dbd188c31719c1e6fd359a27a903 WatchSource:0}: Error finding container 36d71c704a5c24efeae1da2486ed92b6b982dbd188c31719c1e6fd359a27a903: Status 404 returned error can't find the container with id 36d71c704a5c24efeae1da2486ed92b6b982dbd188c31719c1e6fd359a27a903
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.704776 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.705352 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.205328686 +0000 UTC m=+138.153446390 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.727349 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" event={"ID":"e73a6475-be76-44c4-8fd9-eb5c6799e7fc","Type":"ContainerStarted","Data":"da30a1b01375da7c680fd3d21e55bec46cf7aa47706f9c0981ab75e4308fe472"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.748147 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fsgvs" podStartSLOduration=119.748121083 podStartE2EDuration="1m59.748121083s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:10.714544083 +0000 UTC m=+137.662661787" watchObservedRunningTime="2025-12-05 10:50:10.748121083 +0000 UTC m=+137.696238787"
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.806882 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.807875 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.307858517 +0000 UTC m=+138.255976221 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.835175 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bzvs8" event={"ID":"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8","Type":"ContainerStarted","Data":"ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.851734 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-r2lwj" event={"ID":"7c810fdc-b085-41e9-9f84-d09b8b28d809","Type":"ContainerStarted","Data":"a5933387762e1de5e6b5b9fc662380af6dbe623f04b5dae32205a5b2f539a8e6"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.882445 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" event={"ID":"a1714e18-7b9d-435d-9602-caf20c5a43da","Type":"ContainerStarted","Data":"59db25bafd4a3abce23888a1cc0ae29c38caab20a0cd99830124f72f20f824d7"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.882509 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" event={"ID":"a1714e18-7b9d-435d-9602-caf20c5a43da","Type":"ContainerStarted","Data":"da11478de8e5ed027a6399fda3d30a8b07c2b88169e3459a64dc1724dd7981a1"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.886326 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" event={"ID":"0aec87cf-7349-4cd6-8364-333bf8614193","Type":"ContainerStarted","Data":"84caa572c58f389456ce0d64b1d4b5709abc1797661b916be55728da96f16945"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.897877 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5qwb7" event={"ID":"8088b3b8-62ae-4e37-8d98-8072fe5ac30b","Type":"ContainerStarted","Data":"ef7921b20ddaa58f9c5e1c3ec1ecf9b4c0f27da250d198105d41080fe494e427"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.912791 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.913623 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.413592252 +0000 UTC m=+138.361709946 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.922607 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.922832 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk"]
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.924419 5014 generic.go:334] "Generic (PLEG): container finished" podID="e8af4e41-2e1b-4164-abc3-587460928aab" containerID="b3bb668d27afb019861b1dc981289f553ce0c14f9d0c52fec7f04532300ac40c" exitCode=0
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.924526 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" event={"ID":"e8af4e41-2e1b-4164-abc3-587460928aab","Type":"ContainerDied","Data":"b3bb668d27afb019861b1dc981289f553ce0c14f9d0c52fec7f04532300ac40c"}
Dec 05 10:50:10 crc kubenswrapper[5014]: E1205 10:50:10.925066 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.425039352 +0000 UTC m=+138.373157106 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
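Both error flavors repeating above (attacher.MountDevice failed to create newCsiDriverClient, and Unmounter.TearDownAt failed to get CSI client) bottom out in the same check: before dialing a CSI driver, the kubelet looks the driver name up in its list of drivers that have completed node plugin registration, and kubevirt.io.hostpath-provisioner is not in that list yet. A minimal Go sketch of that lookup pattern, with hypothetical names (csiDriverRegistry, lookup) standing in for the kubelet's actual internals:

    package main

    import (
        "errors"
        "fmt"
        "sync"
    )

    // csiDriverRegistry mimics the kubelet-side map of registered CSI
    // drivers (driver name -> plugin socket). A driver enters this map
    // only after its node plugin registers with the kubelet.
    type csiDriverRegistry struct {
        mu      sync.RWMutex
        drivers map[string]string
    }

    func (r *csiDriverRegistry) lookup(name string) (string, error) {
        r.mu.RLock()
        defer r.mu.RUnlock()
        sock, ok := r.drivers[name]
        if !ok {
            // The condition surfacing repeatedly in the log above.
            return "", errors.New("driver name " + name + " not found in the list of registered CSI drivers")
        }
        return sock, nil
    }

    func main() {
        reg := &csiDriverRegistry{drivers: map[string]string{}} // plugin not yet registered
        if _, err := reg.lookup("kubevirt.io.hostpath-provisioner"); err != nil {
            fmt.Println("Error:", err)
        }
    }

Note that both sides of the same PVC are stuck on this lookup: the unmount for the old pod 8f668bae-612b-4b75-9490-919e737c6a3b and the mount for the new image-registry pod 425046d9-b7c9-4b15-be69-2b2ab11aad8f.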
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.938319 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg"]
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.945188 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz"]
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.946192 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vf2bj" event={"ID":"506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f","Type":"ContainerStarted","Data":"03ef18578ff0ed836eee9b90560bc5484359b3604c69d51208ebb230eec527e3"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.967646 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" event={"ID":"17e4e5df-7027-456d-be2d-b412f4a379ea","Type":"ContainerStarted","Data":"68d07ae85612a55452c5c9f88017831e4da971f141e4b754d5cc0be97c9be36b"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.969050 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" event={"ID":"ad71de77-0b33-48ff-86d1-87235f83b4bf","Type":"ContainerStarted","Data":"56dca431d3d638b5779682aeaa328182e6a57eb6608ec5b890efa074c582278f"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.970189 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" event={"ID":"9230af7f-443e-452e-b3ba-8bd78a0f8211","Type":"ContainerStarted","Data":"013942b737e75c35dce2860bfde0342f54a58888200322376b9fae622e1e3e49"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.970868 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" event={"ID":"a70c7642-31e1-473d-9ebc-22af0ca255ec","Type":"ContainerStarted","Data":"74924ae41508cb2a750f31d8b6250739e54cdd694c41fcdeead6aff385fef4ce"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.971832 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" event={"ID":"b3f13e37-538f-4ef3-9b4f-0be841ea1078","Type":"ContainerStarted","Data":"e6167e38e7e5ec929d0af1e90fd013b04440cb6d690d0757df04fa77478717f3"}
Dec 05 10:50:10 crc kubenswrapper[5014]: I1205 10:50:10.973820 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" event={"ID":"8b70a66a-f89a-4578-adbc-50757fdb730f","Type":"ContainerStarted","Data":"156ec292ae4c8c5798856ae8fbec5785aebfd3d6e8d5f43eb93251d19cdd0a15"}
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.025973 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.026294 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.526249185 +0000 UTC m=+138.474366939 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.026885 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.028653 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.528638614 +0000 UTC m=+138.476756318 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.083673 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" podStartSLOduration=120.083626992 podStartE2EDuration="2m0.083626992s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:11.082782428 +0000 UTC m=+138.030900152" watchObservedRunningTime="2025-12-05 10:50:11.083626992 +0000 UTC m=+138.031744696"
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.127762 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.128105 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.628056416 +0000 UTC m=+138.576174120 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.128649 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.129088 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.629070475 +0000 UTC m=+138.577188179 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.188333 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-djp4n" podStartSLOduration=121.188310046 podStartE2EDuration="2m1.188310046s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:11.185488924 +0000 UTC m=+138.133606648" watchObservedRunningTime="2025-12-05 10:50:11.188310046 +0000 UTC m=+138.136427770"
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.226841 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jtgzb" podStartSLOduration=121.226806208 podStartE2EDuration="2m1.226806208s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:11.226592592 +0000 UTC m=+138.174710316" watchObservedRunningTime="2025-12-05 10:50:11.226806208 +0000 UTC m=+138.174923912"
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.230195 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.230566 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.730540275 +0000 UTC m=+138.678657979 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.230955 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.231374 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.73136703 +0000 UTC m=+138.679484734 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.297116 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-2gdp8"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.298851 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-k6tg9"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.310001 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9gwng"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.313433 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-778n8"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.338960 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.339550 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.839526273 +0000 UTC m=+138.787643977 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
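Each failed volume operation above is parked by nestedpendingoperations with a "No retries permitted until <deadline>" stamp; the reconciler keeps restarting the operation, but it only actually re-runs once the deadline passes, which is why the same pair of errors recurs at roughly half-second intervals. A small Go sketch of that gating pattern, under stated assumptions: retryGate is an illustrative type, not the kubelet's, and while the real implementation grows the delay with exponential backoff, this window of the log only ever shows the initial 500ms step:

    package main

    import (
        "fmt"
        "time"
    )

    // retryGate parks a failed operation until its deadline passes,
    // mirroring the "No retries permitted until ..." messages above.
    type retryGate struct {
        notBefore time.Time
    }

    func (g *retryGate) allowed(now time.Time) bool { return !now.Before(g.notBefore) }

    // recordFailure sets the next-allowed time; the log shows a
    // durationBeforeRetry of 500ms at this point in startup.
    func (g *retryGate) recordFailure(now time.Time) {
        g.notBefore = now.Add(500 * time.Millisecond)
    }

    func main() {
        g := &retryGate{}
        now := time.Now()
        g.recordFailure(now)
        fmt.Println("allowed immediately?", g.allowed(now))                           // false
        fmt.Println("allowed after 600ms?", g.allowed(now.Add(600*time.Millisecond))) // true
    }

Note the SyncLoop UPDATE for hostpath-provisioner/csi-hostpathplugin-778n8 just above: the pod that will eventually register kubevirt.io.hostpath-provisioner is only now being set up, so the gated retries keep failing in the meantime.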
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.353605 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-bzvs8" podStartSLOduration=121.353577099 podStartE2EDuration="2m1.353577099s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:11.340161342 +0000 UTC m=+138.288279056" watchObservedRunningTime="2025-12-05 10:50:11.353577099 +0000 UTC m=+138.301694803"
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.364692 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nphwc"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.364738 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.364750 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.364765 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2mtlm"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.367643 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7sfns"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.373910 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.377726 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.394260 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4"]
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.441593 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.445183 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:11.945159985 +0000 UTC m=+138.893277689 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.546663 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.547759 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.047714066 +0000 UTC m=+138.995831770 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.547853 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.548758 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.048749457 +0000 UTC m=+138.996867161 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.648837 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.649636 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.14961508 +0000 UTC m=+139.097732784 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.751054 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.752229 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.25210042 +0000 UTC m=+139.200218124 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.854158 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.855484 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.355459745 +0000 UTC m=+139.303577449 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:11 crc kubenswrapper[5014]: I1205 10:50:11.971240 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:11 crc kubenswrapper[5014]: E1205 10:50:11.971754 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.471736603 +0000 UTC m=+139.419854297 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:11.996247 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" event={"ID":"767efcaf-90a1-47ea-93a7-0583e15cbd3d","Type":"ContainerStarted","Data":"7e1f1b03f18d43043b07d42e90377ec43b85568dd9819ee81e7be05c5d888c97"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.001122 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" event={"ID":"d8b0502f-2954-4ebc-9920-28afad95dc00","Type":"ContainerStarted","Data":"9ff047508418ba9304245b7ebe68b6d75525afaa603f7532246a1a528322db6c"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.014625 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-xjs5g" event={"ID":"a0934bb9-807d-4fac-90db-4535e626a2b2","Type":"ContainerStarted","Data":"40223e9d5ffdb1a7648bdea66da1fcf81b8796e6c7e315ce7217c317c89bdb1a"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.029056 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" event={"ID":"0aec87cf-7349-4cd6-8364-333bf8614193","Type":"ContainerStarted","Data":"f2cd97fc4b120aca74895eb5fc57bc271901fcf1631b481f951c99032225210b"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.056577 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h" event={"ID":"74f4896d-2ffa-459a-a62d-9c29df96e5dc","Type":"ContainerStarted","Data":"19e2fb983b9f900d216ed4b65d5df11a71d52630eb50e71502c1c47ca7c78adf"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.058188 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" event={"ID":"895d4707-26ef-44b2-aecc-d80f274b4b92","Type":"ContainerStarted","Data":"d478a6c6e4680ba405f2ba36ae156c1f3019cf3e3020a325727de44aff3f9e60"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.064713 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" event={"ID":"9230af7f-443e-452e-b3ba-8bd78a0f8211","Type":"ContainerStarted","Data":"c8519295d73be0c166541d119868a923089f6e862e3bbd0f263a0f59205fc5ed"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.065134 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.067466 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" event={"ID":"e73a6475-be76-44c4-8fd9-eb5c6799e7fc","Type":"ContainerStarted","Data":"3856df2cae389b3f744fab27f214609366017a46ecb8eb1f3b049280bda34453"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.075108 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" event={"ID":"b38c4eae-27cf-40fd-89ee-6513d241b130","Type":"ContainerStarted","Data":"03816f6117ababcbae676f3fec35804d3f588d9fb4707fb7aeef93c134fc36aa"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.075750 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.076140 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.576116167 +0000 UTC m=+139.524233871 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.077938 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2mtlm" event={"ID":"c6353800-5d71-4732-a97d-3aeb02b4d648","Type":"ContainerStarted","Data":"8ef04f10d4f74ee8f7fe44d84a15931c97b1c7277f626d22b8c92a7c84888b0e"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.093351 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" podStartSLOduration=121.093334005 podStartE2EDuration="2m1.093334005s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.091131601 +0000 UTC m=+139.039249305" watchObservedRunningTime="2025-12-05 10:50:12.093334005 +0000 UTC m=+139.041451699"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.093931 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-n9mw4" podStartSLOduration=121.093924072 podStartE2EDuration="2m1.093924072s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.057083538 +0000 UTC m=+139.005201242" watchObservedRunningTime="2025-12-05 10:50:12.093924072 +0000 UTC m=+139.042041776"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.095009 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" event={"ID":"9e347737-8d07-4246-a6fd-60e7aa5bc6ab","Type":"ContainerStarted","Data":"6f076624245b4024a5f9871a662d6b8e45a4628227c24f71c89705d774081374"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.112029 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" event={"ID":"b3f13e37-538f-4ef3-9b4f-0be841ea1078","Type":"ContainerStarted","Data":"676f0a133948e9799586119174a84fdd744f3a70ce902077e7aa384105912f20"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.120671 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-rstcd" podStartSLOduration=121.120648254 podStartE2EDuration="2m1.120648254s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.119238513 +0000 UTC m=+139.067356217" watchObservedRunningTime="2025-12-05 10:50:12.120648254 +0000 UTC m=+139.068765958"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.133261 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vf2bj" event={"ID":"506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f","Type":"ContainerStarted","Data":"8a03b8688aa4b7af7d0d17dd21e5cad98bc5899ccdbbccac93a786637ff50846"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.135121 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-vf2bj"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.136728 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" event={"ID":"ad71de77-0b33-48ff-86d1-87235f83b4bf","Type":"ContainerStarted","Data":"f73618499ceaaff256d7ce3effafc57c3c794844cf6ee2bafd5071ce153cc549"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.137761 5014 patch_prober.go:28] interesting pod/downloads-7954f5f757-vf2bj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.137793 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vf2bj" podUID="506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.159597 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" event={"ID":"bc735a51-3df7-4004-9fce-421450c9d084","Type":"ContainerStarted","Data":"1e80ce605b32cde8d8d2382ea3dd7837872ac8d78266b59f19b8e87779277c36"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.180900 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cfqtb" podStartSLOduration=121.180883933 podStartE2EDuration="2m1.180883933s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.146362636 +0000 UTC m=+139.094480360" watchObservedRunningTime="2025-12-05 10:50:12.180883933 +0000 UTC m=+139.129001637"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.181544 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-vf2bj" podStartSLOduration=122.181540153 podStartE2EDuration="2m2.181540153s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.17903645 +0000 UTC m=+139.127154164" watchObservedRunningTime="2025-12-05 10:50:12.181540153 +0000 UTC m=+139.129657857"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.182361 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.185847 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.685829717 +0000 UTC m=+139.633947421 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.206619 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" event={"ID":"a70c7642-31e1-473d-9ebc-22af0ca255ec","Type":"ContainerStarted","Data":"7bc93e4936e563d296ccac645b3e105012e657513cf41eee1cf44d8de0f6930c"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.217521 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" event={"ID":"9357b561-29c1-4fb1-9004-8bf8378aad02","Type":"ContainerStarted","Data":"36d71c704a5c24efeae1da2486ed92b6b982dbd188c31719c1e6fd359a27a903"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.220699 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" event={"ID":"388f5e1e-b01b-4321-99ca-f97f3812f98f","Type":"ContainerStarted","Data":"bcfd310d5079c3b1e0c6e4d3c1b5fa38c9f125a856f46b663a66c615bac2a3bc"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.233960 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" event={"ID":"81e480c0-aa4c-485f-b69b-570d1edc1ef7","Type":"ContainerStarted","Data":"8538351d832a08639017beada17ad4771b4a20045eb3391f1f047af3f40867c3"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.234960 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lzrfw" podStartSLOduration=121.234937075 podStartE2EDuration="2m1.234937075s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.234323887 +0000 UTC m=+139.182441611" watchObservedRunningTime="2025-12-05 10:50:12.234937075 +0000 UTC m=+139.183054779"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.239468 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" event={"ID":"9aa2370d-27db-4547-95f3-f09274275737","Type":"ContainerStarted","Data":"7329e5fd87b28c9ec5e2d268ab9688faa230bb5f7384b1e8474ada9fa132a908"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.244047 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" event={"ID":"0462b518-e848-4601-bcf6-d30a321e2191","Type":"ContainerStarted","Data":"6106262e24b82a28c8e03aac97c07da8930e55525c4fa5c8e058d2bd29f71d8a"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.252688 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-2gdp8" event={"ID":"22a560b7-9ab6-434e-b5f6-fe8692561dd4","Type":"ContainerStarted","Data":"c7fca961835e1c5186f8c6889b103803d0501a0bb8f3cb575930b92883f357a5"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.263248 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" event={"ID":"505e68d1-6316-4847-a116-79e58bbc711d","Type":"ContainerStarted","Data":"f7f8e7a6fe9c141c432f2c68d9c814c0966cb6150c8ca02aaa1cc9761a0d0fad"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.268340 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" event={"ID":"8b70a66a-f89a-4578-adbc-50757fdb730f","Type":"ContainerStarted","Data":"6f3f234b9e58833230399d2900914b74cd03bab5c65434ddb7dd0dcc258ea5ae"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.298640 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.300398 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.800339883 +0000 UTC m=+139.748457587 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.307701 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" event={"ID":"6d447c3b-5da9-443c-aeff-aa202692a222","Type":"ContainerStarted","Data":"af74898c9c4def70ad1b52030bebed5f6c956f31744ab0fec971d468d0e79abd"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.339345 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5qwb7" event={"ID":"8088b3b8-62ae-4e37-8d98-8072fe5ac30b","Type":"ContainerStarted","Data":"e0a7f6270263b54356f6251dceaf15e14ef40ec255d337eabb37f026f9440f6e"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.345725 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-nbksm" podStartSLOduration=121.345703134 podStartE2EDuration="2m1.345703134s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.296441051 +0000 UTC m=+139.244558755" watchObservedRunningTime="2025-12-05 10:50:12.345703134 +0000 UTC m=+139.293820838"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.370490 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-q4jjv" podStartSLOduration=121.3704701 podStartE2EDuration="2m1.3704701s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.34212995 +0000 UTC m=+139.290247664" watchObservedRunningTime="2025-12-05 10:50:12.3704701 +0000 UTC m=+139.318587804"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.378733 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" event={"ID":"edf32fc4-b4ff-4b20-9a56-78331a268e28","Type":"ContainerStarted","Data":"cc7508bb6d33f4ecaa03cbbabdc3d020fc8a2f0bcacf9db9917e652005578543"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.379881 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.389621 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" event={"ID":"7e23855f-2f97-407a-89ba-7af3b1e7e70a","Type":"ContainerStarted","Data":"a9769f67ac62a5ff138d5252175ee5b4a3f75f9a096eaf57de0007302b1f1244"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.392616 5014 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-dkhwk container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body=
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.392806 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" podUID="edf32fc4-b4ff-4b20-9a56-78331a268e28" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.400995 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-5qwb7" podStartSLOduration=121.4009667 podStartE2EDuration="2m1.4009667s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.37256504 +0000 UTC m=+139.320682744" watchObservedRunningTime="2025-12-05 10:50:12.4009667 +0000 UTC m=+139.349084414"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.403174 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" event={"ID":"df1aa6de-1ebf-4646-9a50-aa735e7ce529","Type":"ContainerStarted","Data":"f64b14c656387ba51f6569309730d009c3f95a4f1e96c28929355411eca2776d"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.404828 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.406218 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:12.906200701 +0000 UTC m=+139.854318405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.421107 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" event={"ID":"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb","Type":"ContainerStarted","Data":"300566bfc3f70fc0dab9aac701f2b812d894b82cf3393c421f3a95923cc85f6b"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.421876 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.437896 5014 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-5wnsv container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body=
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.437998 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" podUID="6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": dial tcp 10.217.0.14:8443: connect: connection refused"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.439715 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" podStartSLOduration=121.439688858 podStartE2EDuration="2m1.439688858s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.399937011 +0000 UTC m=+139.348054735" watchObservedRunningTime="2025-12-05 10:50:12.439688858 +0000 UTC m=+139.387806562"
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.456918 5014 generic.go:334] "Generic (PLEG): container finished" podID="e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e" containerID="0d81c938522003578173564c88929fc2f7d8ffc78b05b0d3e16bf9cddf3dbf4f" exitCode=0
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.457057 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" event={"ID":"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e","Type":"ContainerDied","Data":"0d81c938522003578173564c88929fc2f7d8ffc78b05b0d3e16bf9cddf3dbf4f"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.469870 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778n8" event={"ID":"a921bd69-8bfd-4ab3-bfed-87fb64dbf061","Type":"ContainerStarted","Data":"30521acb0c0ff087888a64e8aca6dd656fc565ee11a9287b69ce8a36b37d1a2b"}
Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.471098 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" podStartSLOduration=121.471086295 podStartE2EDuration="2m1.471086295s"
podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.470182239 +0000 UTC m=+139.418299953" watchObservedRunningTime="2025-12-05 10:50:12.471086295 +0000 UTC m=+139.419203999" Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.481753 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" podStartSLOduration=121.481723262 podStartE2EDuration="2m1.481723262s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:12.439460102 +0000 UTC m=+139.387577826" watchObservedRunningTime="2025-12-05 10:50:12.481723262 +0000 UTC m=+139.429840966" Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.499923 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" event={"ID":"1a285682-cde2-4857-9e6f-e41577d083de","Type":"ContainerStarted","Data":"6e7b1eb175be70b4d5768ee774fc60e7813ace609374a6764885ad49712e8c13"} Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.507168 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.507563 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.007512277 +0000 UTC m=+139.955629981 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.508399 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.509720 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.00969396 +0000 UTC m=+139.957811654 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.543229 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9gwng" event={"ID":"15a55000-f900-4061-a9fc-6983f8ba066e","Type":"ContainerStarted","Data":"a2877c98b27082af027e876d8592e49b1b34e54b0ae12adddf2c4178c762c66b"} Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.610658 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.610638 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.110610535 +0000 UTC m=+140.058728239 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.611675 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.616929 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.116903127 +0000 UTC m=+140.065021031 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.712416 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.712824 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.212806276 +0000 UTC m=+140.160923980 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.828531 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.829558 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.329507197 +0000 UTC m=+140.277624951 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.929873 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.930112 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.430070151 +0000 UTC m=+140.378187855 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:12 crc kubenswrapper[5014]: I1205 10:50:12.930793 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:12 crc kubenswrapper[5014]: E1205 10:50:12.931213 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.431193274 +0000 UTC m=+140.379310978 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.032802 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.033194 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.533162279 +0000 UTC m=+140.481279983 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.033479 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.033912 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.53390492 +0000 UTC m=+140.482022624 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.139556 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.139940 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.639919562 +0000 UTC m=+140.588037266 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.241882 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.242759 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.742740733 +0000 UTC m=+140.690858437 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.250447 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.261459 5014 patch_prober.go:28] interesting pod/router-default-5444994796-5qwb7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 10:50:13 crc kubenswrapper[5014]: [-]has-synced failed: reason withheld Dec 05 10:50:13 crc kubenswrapper[5014]: [+]process-running ok Dec 05 10:50:13 crc kubenswrapper[5014]: healthz check failed Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.261517 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5qwb7" podUID="8088b3b8-62ae-4e37-8d98-8072fe5ac30b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.353855 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.354790 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.854767758 +0000 UTC m=+140.802885462 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.461258 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.461761 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:13.961744618 +0000 UTC m=+140.909862322 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.563970 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.564521 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.064492805 +0000 UTC m=+141.012610509 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.588920 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-xjs5g" event={"ID":"a0934bb9-807d-4fac-90db-4535e626a2b2","Type":"ContainerStarted","Data":"54b00d9f1558634b54f69979f12bc24a3dacad0822c5a7ca3b705dcadefe2933"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.608490 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" event={"ID":"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb","Type":"ContainerStarted","Data":"649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.633557 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" event={"ID":"6d447c3b-5da9-443c-aeff-aa202692a222","Type":"ContainerStarted","Data":"be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.636474 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.640381 5014 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nphwc container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.640520 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" probeResult="failure" 
output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.659406 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2mtlm" event={"ID":"c6353800-5d71-4732-a97d-3aeb02b4d648","Type":"ContainerStarted","Data":"b0d008e97f8c2c47a6216a31845c506ce4f37911f5f68d27243694342977cbb2"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.660664 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.666525 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.667156 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.16713344 +0000 UTC m=+141.115251144 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.668585 5014 patch_prober.go:28] interesting pod/console-operator-58897d9998-2mtlm container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.668660 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2mtlm" podUID="c6353800-5d71-4732-a97d-3aeb02b4d648" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.669052 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.708806 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" event={"ID":"ad71de77-0b33-48ff-86d1-87235f83b4bf","Type":"ContainerStarted","Data":"01a1afccd14299a0b60556907702fa831d629dd6b67cb9a2d5c1813639edb30d"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.731859 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-2gdp8" event={"ID":"22a560b7-9ab6-434e-b5f6-fe8692561dd4","Type":"ContainerStarted","Data":"bb76184be76a1ceb4465fbf1ae20f536a9734e53835aae2fc57b53d9e1edba21"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 
10:50:13.762711 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" event={"ID":"81e480c0-aa4c-485f-b69b-570d1edc1ef7","Type":"ContainerStarted","Data":"a966666f28a2d7074f2f8edc3fdf5a7e276d565fe46ddd6cfa10e8064bd62e51"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.762784 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" event={"ID":"81e480c0-aa4c-485f-b69b-570d1edc1ef7","Type":"ContainerStarted","Data":"bfe5945fb12692383077248f8c6cc8c41a34c4eed651f1d834826c51cd330705"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.775651 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.777341 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.277307372 +0000 UTC m=+141.225425076 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.789577 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9gwng" event={"ID":"15a55000-f900-4061-a9fc-6983f8ba066e","Type":"ContainerStarted","Data":"8ddbfa92d3396d85061167aa20d490ee0d1fe9d8bde12f792430297a221841be"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.826882 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" event={"ID":"767efcaf-90a1-47ea-93a7-0583e15cbd3d","Type":"ContainerStarted","Data":"cb01feeb30380ee1476f5d83c58049eb6f6a6aaf8d77e2894d6ebd6ddbb31124"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.827325 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.853120 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" event={"ID":"895d4707-26ef-44b2-aecc-d80f274b4b92","Type":"ContainerStarted","Data":"656124c39015d1a5cb6585e90cfd33a44e8af203d45b8068f266bfb6f11366ee"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.882776 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" event={"ID":"388f5e1e-b01b-4321-99ca-f97f3812f98f","Type":"ContainerStarted","Data":"82dc183603fe72c5881378e5309127a33755e9b8893136655e0abc1f201e915f"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.887727 5014 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.888964 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.388943597 +0000 UTC m=+141.337061291 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.891188 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" event={"ID":"d8b0502f-2954-4ebc-9920-28afad95dc00","Type":"ContainerStarted","Data":"af9e3f85697f9b8c9fe07d6b742b60589955355d216cc7b1a5b63551c97161e3"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.911404 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" event={"ID":"9aa2370d-27db-4547-95f3-f09274275737","Type":"ContainerStarted","Data":"d5fe98dca49cf644fd6c63dd35510af43dbf99c23a5ce6d8cb103e3566f01b18"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.931606 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" event={"ID":"0462b518-e848-4601-bcf6-d30a321e2191","Type":"ContainerStarted","Data":"09ea1c2ac9ffd69c9414518de67fe7a74fe5f065bf74b2e30b8607ff9fcf3747"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.932549 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.945458 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h" event={"ID":"74f4896d-2ffa-459a-a62d-9c29df96e5dc","Type":"ContainerStarted","Data":"2c10916931e4c6c1590e4941049868535f82cbe81b177cbaa29681d441fedd47"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.945538 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h" event={"ID":"74f4896d-2ffa-459a-a62d-9c29df96e5dc","Type":"ContainerStarted","Data":"6716a7ec5ad2e3492af64491d85a2753cf92ec43a3c7d9dd5fe747feb890417a"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.950320 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.951617 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" 
event={"ID":"9e347737-8d07-4246-a6fd-60e7aa5bc6ab","Type":"ContainerStarted","Data":"987ed194eaf0445fa6519fd37629de1b3378991b139df9c02aafb01578c362bf"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.952638 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.967494 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" event={"ID":"e8af4e41-2e1b-4164-abc3-587460928aab","Type":"ContainerStarted","Data":"41f74d30749d770b98a24c960e92aebdedc6ce04499e187320e814cf59152fac"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.976891 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" event={"ID":"e53ac2ff-ffbc-4437-8c8d-a8e10387bc2e","Type":"ContainerStarted","Data":"ba3b63fc2ae87a83cf85b1e2724847cc05ab3e0dff75f9323e1e91a1b245f31c"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.977055 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.988875 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.989001 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.488975845 +0000 UTC m=+141.437093549 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.989307 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:13 crc kubenswrapper[5014]: E1205 10:50:13.991994 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.491978513 +0000 UTC m=+141.440096207 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.995640 5014 generic.go:334] "Generic (PLEG): container finished" podID="1a285682-cde2-4857-9e6f-e41577d083de" containerID="6f35768a8e50e657f73e3eb003a33d2f2af3a6b206246549cd0d3db9908ab290" exitCode=0 Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.995896 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" event={"ID":"1a285682-cde2-4857-9e6f-e41577d083de","Type":"ContainerDied","Data":"6f35768a8e50e657f73e3eb003a33d2f2af3a6b206246549cd0d3db9908ab290"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.999724 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" event={"ID":"b38c4eae-27cf-40fd-89ee-6513d241b130","Type":"ContainerStarted","Data":"d8084dc459533bdc343042a582dcabca15c37454b59534cae634db70a4e3a8dc"} Dec 05 10:50:13 crc kubenswrapper[5014]: I1205 10:50:13.999861 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" event={"ID":"b38c4eae-27cf-40fd-89ee-6513d241b130","Type":"ContainerStarted","Data":"3d219dc2029f65a04b8b2b8a010e9140685d90cb25b5199bfa725fe88bce7c6a"} Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.013038 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" event={"ID":"bc735a51-3df7-4004-9fce-421450c9d084","Type":"ContainerStarted","Data":"7d360da4453e7c36041f32b213b242588075ea366bf0bfbb29e4e9739f4b59d2"} Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.028046 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" event={"ID":"edf32fc4-b4ff-4b20-9a56-78331a268e28","Type":"ContainerStarted","Data":"34dc160d4a7a473c2ed8f8ba3bf26e9e45733c91452a4ce665c0290d522f4b33"} Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.045629 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" event={"ID":"9357b561-29c1-4fb1-9004-8bf8378aad02","Type":"ContainerStarted","Data":"8e0d5ab755198c5765a6884aeaceaf511cd55b1df427f8ea13a072d02b52ae1f"} Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.056590 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dkhwk" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.069782 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-k6tg9" event={"ID":"7e23855f-2f97-407a-89ba-7af3b1e7e70a","Type":"ContainerStarted","Data":"d48959ae63abb1044ab74f6e28a15847a40a35162938d38680c5e9fc8db95676"} Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.088813 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" 
event={"ID":"df1aa6de-1ebf-4646-9a50-aa735e7ce529","Type":"ContainerStarted","Data":"7ffb83328573b21d4e8337f35801e0e35d105469dac52de064664db8e3b64082"} Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.090883 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.106742 5014 patch_prober.go:28] interesting pod/downloads-7954f5f757-vf2bj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.106833 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vf2bj" podUID="506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 05 10:50:14 crc kubenswrapper[5014]: E1205 10:50:14.109611 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.609566059 +0000 UTC m=+141.557683763 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.164223 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4tl6v" podStartSLOduration=123.164193966 podStartE2EDuration="2m3.164193966s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.126604011 +0000 UTC m=+141.074721715" watchObservedRunningTime="2025-12-05 10:50:14.164193966 +0000 UTC m=+141.112311660" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.210829 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:14 crc kubenswrapper[5014]: E1205 10:50:14.211247 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.711232874 +0000 UTC m=+141.659350578 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.250096 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" podStartSLOduration=124.250068606 podStartE2EDuration="2m4.250068606s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.246796532 +0000 UTC m=+141.194914246" watchObservedRunningTime="2025-12-05 10:50:14.250068606 +0000 UTC m=+141.198186310" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.275855 5014 patch_prober.go:28] interesting pod/router-default-5444994796-5qwb7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 10:50:14 crc kubenswrapper[5014]: [-]has-synced failed: reason withheld Dec 05 10:50:14 crc kubenswrapper[5014]: [+]process-running ok Dec 05 10:50:14 crc kubenswrapper[5014]: healthz check failed Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.275937 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5qwb7" podUID="8088b3b8-62ae-4e37-8d98-8072fe5ac30b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.300822 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bvs6h" podStartSLOduration=123.300791531 podStartE2EDuration="2m3.300791531s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.292355908 +0000 UTC m=+141.240473622" watchObservedRunningTime="2025-12-05 10:50:14.300791531 +0000 UTC m=+141.248909235" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.313938 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:14 crc kubenswrapper[5014]: E1205 10:50:14.314216 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.814198689 +0000 UTC m=+141.762316393 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.401182 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" podStartSLOduration=124.40115993 podStartE2EDuration="2m4.40115993s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.399953035 +0000 UTC m=+141.348070759" watchObservedRunningTime="2025-12-05 10:50:14.40115993 +0000 UTC m=+141.349277634" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.403045 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sdrhf" podStartSLOduration=123.403038954 podStartE2EDuration="2m3.403038954s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.352655799 +0000 UTC m=+141.300773503" watchObservedRunningTime="2025-12-05 10:50:14.403038954 +0000 UTC m=+141.351156658" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.417389 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:14 crc kubenswrapper[5014]: E1205 10:50:14.417967 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:14.917950185 +0000 UTC m=+141.866067889 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.518668 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:14 crc kubenswrapper[5014]: E1205 10:50:14.519140 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.019123837 +0000 UTC m=+141.967241541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.622246 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:14 crc kubenswrapper[5014]: E1205 10:50:14.622762 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.12273853 +0000 UTC m=+142.070856234 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.633888 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" podStartSLOduration=123.633860921 podStartE2EDuration="2m3.633860921s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.460669939 +0000 UTC m=+141.408787643" watchObservedRunningTime="2025-12-05 10:50:14.633860921 +0000 UTC m=+141.581978625" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.636785 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l5dft" podStartSLOduration=124.636776505 podStartE2EDuration="2m4.636776505s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.633855971 +0000 UTC m=+141.581973695" watchObservedRunningTime="2025-12-05 10:50:14.636776505 +0000 UTC m=+141.584894209" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.724435 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:14 crc kubenswrapper[5014]: E1205 10:50:14.724845 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.224830418 +0000 UTC m=+142.172948122 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.804311 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-2mtlm" podStartSLOduration=124.804287074 podStartE2EDuration="2m4.804287074s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.795877041 +0000 UTC m=+141.743994765" watchObservedRunningTime="2025-12-05 10:50:14.804287074 +0000 UTC m=+141.752404778" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.826676 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:14 crc kubenswrapper[5014]: E1205 10:50:14.827383 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.32735847 +0000 UTC m=+142.275476164 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.827479 5014 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xjhrz container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.827513 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" podUID="767efcaf-90a1-47ea-93a7-0583e15cbd3d" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.24:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.919735 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" podStartSLOduration=123.919710357 podStartE2EDuration="2m3.919710357s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.87201854 +0000 UTC m=+141.820136244" watchObservedRunningTime="2025-12-05 10:50:14.919710357 +0000 UTC m=+141.867828081" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.928658 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:14 crc kubenswrapper[5014]: E1205 10:50:14.929087 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.429068798 +0000 UTC m=+142.377186502 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.955000 5014 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-7x7jf container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.30:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.955070 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" podUID="9e347737-8d07-4246-a6fd-60e7aa5bc6ab" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.30:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 10:50:14 crc kubenswrapper[5014]: I1205 10:50:14.983840 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sm5bg" podStartSLOduration=123.983819539 podStartE2EDuration="2m3.983819539s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:14.921682025 +0000 UTC m=+141.869799749" watchObservedRunningTime="2025-12-05 10:50:14.983819539 +0000 UTC m=+141.931937243" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.030543 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.031070 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.531051073 +0000 UTC m=+142.479168777 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.074264 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7sfns" podStartSLOduration=124.074230911 podStartE2EDuration="2m4.074230911s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.072722856 +0000 UTC m=+142.020840560" watchObservedRunningTime="2025-12-05 10:50:15.074230911 +0000 UTC m=+142.022348615" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.102740 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j7d5t" event={"ID":"388f5e1e-b01b-4321-99ca-f97f3812f98f","Type":"ContainerStarted","Data":"915b97a5f03e9ed601da68ae42e2a8d5c7e8978733beab57ef6da49d8563e82e"} Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.105047 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jjj6x" event={"ID":"d8b0502f-2954-4ebc-9920-28afad95dc00","Type":"ContainerStarted","Data":"45cc2afd6d9ceedc2fba643f1838e946ffcd91b0bf249c6e8d30bcef109818d7"} Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.123495 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" event={"ID":"e8af4e41-2e1b-4164-abc3-587460928aab","Type":"ContainerStarted","Data":"69e28e2ed615318f0e400a030700e361aa718cb2e78359d1ac4a8fd166323405"} Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.132577 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.134089 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.634067008 +0000 UTC m=+142.582184712 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.136723 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778n8" event={"ID":"a921bd69-8bfd-4ab3-bfed-87fb64dbf061","Type":"ContainerStarted","Data":"ef658b47ebd7463b407332d88acac1b82f488f9ab0fe7177171f92b4cc5e4163"} Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.145577 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-b2znf" podStartSLOduration=124.145549551 podStartE2EDuration="2m4.145549551s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.143616814 +0000 UTC m=+142.091734528" watchObservedRunningTime="2025-12-05 10:50:15.145549551 +0000 UTC m=+142.093667255" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.166061 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" event={"ID":"1a285682-cde2-4857-9e6f-e41577d083de","Type":"ContainerStarted","Data":"a5f662d7bf1576ebf52dc8a33d3f018b7b47831ed0dd2c48087cd37495437486"} Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.181788 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tsztt" podStartSLOduration=124.181765806 podStartE2EDuration="2m4.181765806s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.179122689 +0000 UTC m=+142.127240393" watchObservedRunningTime="2025-12-05 10:50:15.181765806 +0000 UTC m=+142.129883510" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.199284 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-2gdp8" event={"ID":"22a560b7-9ab6-434e-b5f6-fe8692561dd4","Type":"ContainerStarted","Data":"26cc1bc20261b86ec71b12ba4164719f3965ed2f8fc1f995d73d26abefd885ee"} Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.200145 5014 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nphwc container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.200211 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.200710 5014 patch_prober.go:28] interesting pod/downloads-7954f5f757-vf2bj container/download-server namespace/openshift-console: Readiness 
probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.200740 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vf2bj" podUID="506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.215507 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.238449 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.240350 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.740326867 +0000 UTC m=+142.688444571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.255882 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.267623 5014 patch_prober.go:28] interesting pod/router-default-5444994796-5qwb7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 10:50:15 crc kubenswrapper[5014]: [-]has-synced failed: reason withheld Dec 05 10:50:15 crc kubenswrapper[5014]: [+]process-running ok Dec 05 10:50:15 crc kubenswrapper[5014]: healthz check failed Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.267737 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5qwb7" podUID="8088b3b8-62ae-4e37-8d98-8072fe5ac30b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.316682 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xjhrz" podStartSLOduration=124.316649202 podStartE2EDuration="2m4.316649202s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.262491718 +0000 UTC m=+142.210609442" 
watchObservedRunningTime="2025-12-05 10:50:15.316649202 +0000 UTC m=+142.264766916" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.327563 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-2mtlm" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.331211 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-xjs5g" podStartSLOduration=9.331195782 podStartE2EDuration="9.331195782s" podCreationTimestamp="2025-12-05 10:50:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.315762706 +0000 UTC m=+142.263880420" watchObservedRunningTime="2025-12-05 10:50:15.331195782 +0000 UTC m=+142.279313486" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.332081 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cn8nz"] Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.333312 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.341103 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.343184 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.343567 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.843542019 +0000 UTC m=+142.791659933 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.358649 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" podStartSLOduration=124.358626014 podStartE2EDuration="2m4.358626014s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.354499925 +0000 UTC m=+142.302617649" watchObservedRunningTime="2025-12-05 10:50:15.358626014 +0000 UTC m=+142.306743718" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.392602 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cn8nz"] Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.440290 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p" podStartSLOduration=125.440257352 podStartE2EDuration="2m5.440257352s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.436827323 +0000 UTC m=+142.384945047" watchObservedRunningTime="2025-12-05 10:50:15.440257352 +0000 UTC m=+142.388375046" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.443520 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.443663 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-utilities\") pod \"community-operators-cn8nz\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.443695 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-catalog-content\") pod \"community-operators-cn8nz\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.443720 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b584g\" (UniqueName: \"kubernetes.io/projected/a16883f7-65de-4e01-a7e3-adb349c31ea0-kube-api-access-b584g\") pod \"community-operators-cn8nz\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 
10:50:15.444169 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:15.944147415 +0000 UTC m=+142.892265319 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.500252 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2pv7s"] Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.501390 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.505158 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.513435 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" podStartSLOduration=125.513413235 podStartE2EDuration="2m5.513413235s" podCreationTimestamp="2025-12-05 10:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.510797019 +0000 UTC m=+142.458914743" watchObservedRunningTime="2025-12-05 10:50:15.513413235 +0000 UTC m=+142.461530939" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.526508 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2pv7s"] Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.548010 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.548204 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-utilities\") pod \"community-operators-cn8nz\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.548235 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-catalog-content\") pod \"community-operators-cn8nz\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.548251 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b584g\" (UniqueName: \"kubernetes.io/projected/a16883f7-65de-4e01-a7e3-adb349c31ea0-kube-api-access-b584g\") pod 
\"community-operators-cn8nz\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.549111 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-utilities\") pod \"community-operators-cn8nz\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.549263 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.04923351 +0000 UTC m=+142.997351214 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.550524 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-catalog-content\") pod \"community-operators-cn8nz\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.605904 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b584g\" (UniqueName: \"kubernetes.io/projected/a16883f7-65de-4e01-a7e3-adb349c31ea0-kube-api-access-b584g\") pod \"community-operators-cn8nz\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.629117 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9gwng" podStartSLOduration=9.629092886 podStartE2EDuration="9.629092886s" podCreationTimestamp="2025-12-05 10:50:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.567509327 +0000 UTC m=+142.515627031" watchObservedRunningTime="2025-12-05 10:50:15.629092886 +0000 UTC m=+142.577210590" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.651174 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.651299 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrrs9\" (UniqueName: \"kubernetes.io/projected/70885ea0-025c-45b1-9999-7a44c28312ba-kube-api-access-vrrs9\") pod \"certified-operators-2pv7s\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " 
pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.651338 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-catalog-content\") pod \"certified-operators-2pv7s\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.651362 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-utilities\") pod \"certified-operators-2pv7s\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.651740 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.15172421 +0000 UTC m=+143.099841904 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.671230 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.671581 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.748109 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xr4bg"] Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.749131 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.755201 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.755448 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-utilities\") pod \"certified-operators-2pv7s\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.755565 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrrs9\" (UniqueName: \"kubernetes.io/projected/70885ea0-025c-45b1-9999-7a44c28312ba-kube-api-access-vrrs9\") pod \"certified-operators-2pv7s\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.755603 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-catalog-content\") pod \"certified-operators-2pv7s\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.756071 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-catalog-content\") pod \"certified-operators-2pv7s\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.756153 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.256130846 +0000 UTC m=+143.204248540 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.756374 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-utilities\") pod \"certified-operators-2pv7s\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.784475 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" podStartSLOduration=124.784441953 podStartE2EDuration="2m4.784441953s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.76670033 +0000 UTC m=+142.714818054" watchObservedRunningTime="2025-12-05 10:50:15.784441953 +0000 UTC m=+142.732559657" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.803115 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xr4bg"] Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.857242 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-utilities\") pod \"community-operators-xr4bg\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") " pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.857312 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-catalog-content\") pod \"community-operators-xr4bg\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") " pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.857366 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.857445 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gslwm\" (UniqueName: \"kubernetes.io/projected/32523fc8-6af2-488b-984c-e38d294adadd-kube-api-access-gslwm\") pod \"community-operators-xr4bg\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") " pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.857854 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 10:50:16.357838082 +0000 UTC m=+143.305955786 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.886014 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7wfct" podStartSLOduration=124.885988675 podStartE2EDuration="2m4.885988675s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.883648058 +0000 UTC m=+142.831765762" watchObservedRunningTime="2025-12-05 10:50:15.885988675 +0000 UTC m=+142.834106379" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.892902 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrrs9\" (UniqueName: \"kubernetes.io/projected/70885ea0-025c-45b1-9999-7a44c28312ba-kube-api-access-vrrs9\") pod \"certified-operators-2pv7s\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.929368 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8v5hq"] Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.930585 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.958576 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.959167 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.459124578 +0000 UTC m=+143.407242282 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.959337 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gslwm\" (UniqueName: \"kubernetes.io/projected/32523fc8-6af2-488b-984c-e38d294adadd-kube-api-access-gslwm\") pod \"community-operators-xr4bg\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") " pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.959386 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-utilities\") pod \"community-operators-xr4bg\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") " pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.959420 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-catalog-content\") pod \"community-operators-xr4bg\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") " pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.959461 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:15 crc kubenswrapper[5014]: E1205 10:50:15.959896 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.45988073 +0000 UTC m=+143.407998434 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.960074 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-utilities\") pod \"community-operators-xr4bg\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") " pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.960229 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-catalog-content\") pod \"community-operators-xr4bg\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") " pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.964453 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8v5hq"] Dec 05 10:50:15 crc kubenswrapper[5014]: I1205 10:50:15.969784 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-2gdp8" podStartSLOduration=9.969757955 podStartE2EDuration="9.969757955s" podCreationTimestamp="2025-12-05 10:50:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:15.959426407 +0000 UTC m=+142.907544111" watchObservedRunningTime="2025-12-05 10:50:15.969757955 +0000 UTC m=+142.917875649" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.006412 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gslwm\" (UniqueName: \"kubernetes.io/projected/32523fc8-6af2-488b-984c-e38d294adadd-kube-api-access-gslwm\") pod \"community-operators-xr4bg\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") " pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.062818 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.063237 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-utilities\") pod \"certified-operators-8v5hq\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.063331 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-catalog-content\") pod \"certified-operators-8v5hq\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " 
pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.063385 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4c9w\" (UniqueName: \"kubernetes.io/projected/132d8475-31f2-4d2c-90d2-7d7739cc0fea-kube-api-access-h4c9w\") pod \"certified-operators-8v5hq\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: E1205 10:50:16.063493 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.563454111 +0000 UTC m=+143.511571805 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.105647 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.125649 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.165938 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-utilities\") pod \"certified-operators-8v5hq\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.165999 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-catalog-content\") pod \"certified-operators-8v5hq\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.166054 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4c9w\" (UniqueName: \"kubernetes.io/projected/132d8475-31f2-4d2c-90d2-7d7739cc0fea-kube-api-access-h4c9w\") pod \"certified-operators-8v5hq\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.166076 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:16 crc kubenswrapper[5014]: E1205 10:50:16.166421 5014 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.666408735 +0000 UTC m=+143.614526439 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.166729 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-catalog-content\") pod \"certified-operators-8v5hq\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.167077 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-utilities\") pod \"certified-operators-8v5hq\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.221823 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4c9w\" (UniqueName: \"kubernetes.io/projected/132d8475-31f2-4d2c-90d2-7d7739cc0fea-kube-api-access-h4c9w\") pod \"certified-operators-8v5hq\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.257824 5014 patch_prober.go:28] interesting pod/router-default-5444994796-5qwb7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 10:50:16 crc kubenswrapper[5014]: [-]has-synced failed: reason withheld Dec 05 10:50:16 crc kubenswrapper[5014]: [+]process-running ok Dec 05 10:50:16 crc kubenswrapper[5014]: healthz check failed Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.257881 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5qwb7" podUID="8088b3b8-62ae-4e37-8d98-8072fe5ac30b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.267601 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.267689 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:16 crc kubenswrapper[5014]: E1205 10:50:16.268215 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.768187884 +0000 UTC m=+143.716305588 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.271835 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778n8" event={"ID":"a921bd69-8bfd-4ab3-bfed-87fb64dbf061","Type":"ContainerStarted","Data":"dcc7250fd04c39a546c25d6f8dba8d2c02e60c35425c928e28b68017d2934cb1"} Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.271875 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778n8" event={"ID":"a921bd69-8bfd-4ab3-bfed-87fb64dbf061","Type":"ContainerStarted","Data":"e75a770385013ff6db75e807d82538186ffd6dd095d851fa022a84f44e4c1a56"} Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.274595 5014 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nphwc container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.274686 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.379041 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:16 crc kubenswrapper[5014]: E1205 10:50:16.385578 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.885564724 +0000 UTC m=+143.833682428 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.456129 5014 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.480975 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:16 crc kubenswrapper[5014]: E1205 10:50:16.481545 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 10:50:16.981524316 +0000 UTC m=+143.929642020 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.487416 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cn8nz"] Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.503991 5014 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T10:50:16.456180605Z","Handler":null,"Name":""} Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.583112 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:16 crc kubenswrapper[5014]: E1205 10:50:16.584004 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 10:50:17.083986626 +0000 UTC m=+144.032104330 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2s2hb" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.608537 5014 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.608591 5014 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.689927 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.831999 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.904201 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.943782 5014 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
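The errors above are a startup race that resolves itself: every MountVolume/UnmountVolume attempt for pvc-657094db-... fails with "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" until the plugin watcher picks up the registration socket (10:50:16.456) and csi_plugin.go validates and registers the driver (10:50:16.608); the mount attempt at 10:50:16.904 then goes through. The nestedpendingoperations lines gate each failed volume operation behind a per-volume backoff ("No retries permitted until ...", here 500ms). A minimal sketch of that gating pattern, in Go with hypothetical names (not kubelet's actual types):

package main

import (
	"errors"
	"fmt"
	"sync"
	"time"
)

// retryGate loosely mimics nestedpendingoperations' per-volume backoff:
// a failed operation records the earliest time a retry is allowed.
type retryGate struct {
	mu       sync.Mutex
	notUntil map[string]time.Time // volume ID -> "No retries permitted until"
}

// tryMount fails while the named CSI driver is unregistered, arming the
// backoff gate; once the driver appears in the registry, it succeeds.
func (g *retryGate) tryMount(vol, driver string, registered map[string]bool, backoff time.Duration) error {
	g.mu.Lock()
	defer g.mu.Unlock()
	if t, ok := g.notUntil[vol]; ok && time.Now().Before(t) {
		return fmt.Errorf("no retries permitted until %s", t.Format(time.RFC3339Nano))
	}
	if !registered[driver] {
		g.notUntil[vol] = time.Now().Add(backoff)
		return errors.New("driver name " + driver + " not found in the list of registered CSI drivers")
	}
	return nil // registration done: MountDevice/SetUp can proceed
}

func main() {
	g := &retryGate{notUntil: map[string]time.Time{}}
	registered := map[string]bool{}
	const vol = "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8"
	fmt.Println(g.tryMount(vol, "kubevirt.io.hostpath-provisioner", registered, 500*time.Millisecond)) // gated failure
	registered["kubevirt.io.hostpath-provisioner"] = true // plugin socket registered, as at 10:50:16.608
	time.Sleep(500 * time.Millisecond)                    // wait out the backoff window
	fmt.Println(g.tryMount(vol, "kubevirt.io.hostpath-provisioner", registered, 500*time.Millisecond)) // <nil>
}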
Dec 05 10:50:16 crc kubenswrapper[5014]: I1205 10:50:16.943836 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.029205 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xr4bg"]
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.075308 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.076472 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.088434 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.091564 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.091933 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.174261 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2s2hb\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.215140 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e11bf39-431d-4a57-848b-ba9de73e67eb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3e11bf39-431d-4a57-848b-ba9de73e67eb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.215206 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3e11bf39-431d-4a57-848b-ba9de73e67eb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3e11bf39-431d-4a57-848b-ba9de73e67eb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.242439 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8v5hq"]
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.267119 5014 patch_prober.go:28] interesting pod/router-default-5444994796-5qwb7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 10:50:17 crc kubenswrapper[5014]: [-]has-synced failed: reason withheld
Dec 05 10:50:17 crc kubenswrapper[5014]: [+]process-running ok
Dec 05 10:50:17 crc kubenswrapper[5014]: healthz check failed
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.267190 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5qwb7" podUID="8088b3b8-62ae-4e37-8d98-8072fe5ac30b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.299358 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.316561 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3e11bf39-431d-4a57-848b-ba9de73e67eb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3e11bf39-431d-4a57-848b-ba9de73e67eb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.316764 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e11bf39-431d-4a57-848b-ba9de73e67eb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3e11bf39-431d-4a57-848b-ba9de73e67eb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.317262 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3e11bf39-431d-4a57-848b-ba9de73e67eb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3e11bf39-431d-4a57-848b-ba9de73e67eb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.355901 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.363969 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pxrjf"]
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.365144 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pxrjf"]
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.365173 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-778n8" event={"ID":"a921bd69-8bfd-4ab3-bfed-87fb64dbf061","Type":"ContainerStarted","Data":"1762dc49e52681850b199f752eee8d7031e807c83db1b86f21400b66127ebcb1"}
Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.365322 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pxrjf"
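The router's startup-probe output above follows the aggregated healthz pattern: each named sub-check prints a [+] or [-] line, any failure turns the endpoint into an HTTP 500, and the kubelet logs the start of the body. A rough sketch of that aggregation, assuming invented handler wiring (the check names are the ones the log actually shows):

package main

import (
	"fmt"
	"net/http"
)

// check is one named healthz sub-check; a nil error means "[+]name ok".
type check struct {
	name string
	fn   func() error
}

// healthz aggregates sub-checks the way the probe output above reads:
// per-check [+]/[-] lines, then "healthz check failed" plus a 500 status
// if any check failed, which is what the startup probe reports.
func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		body, failed := "", false
		for _, c := range checks {
			if err := c.fn(); err != nil {
				failed = true
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
			} else {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			}
		}
		if failed {
			w.WriteHeader(http.StatusInternalServerError) // probe: "statuscode: 500"
			body += "healthz check failed\n"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	http.HandleFunc("/healthz", healthz([]check{
		{"backend-http", func() error { return fmt.Errorf("backend not ready") }},
		{"has-synced", func() error { return fmt.Errorf("initial sync pending") }},
		{"process-running", func() error { return nil }},
	}))
	http.ListenAndServe(":8080", nil) // error ignored for brevity
}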
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.367159 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e11bf39-431d-4a57-848b-ba9de73e67eb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3e11bf39-431d-4a57-848b-ba9de73e67eb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.367767 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2pv7s"] Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.372081 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.388134 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8v5hq" event={"ID":"132d8475-31f2-4d2c-90d2-7d7739cc0fea","Type":"ContainerStarted","Data":"c06edeb286e091c08506206921bf8e87a3ce5af86f1fd5267dd1c23838bf8b9c"} Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.407387 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xr4bg" event={"ID":"32523fc8-6af2-488b-984c-e38d294adadd","Type":"ContainerStarted","Data":"86f229082747daf1df4097e7e0bdd51927798dfb5b85b8985cfeb2d344a9826a"} Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.418432 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-catalog-content\") pod \"redhat-marketplace-pxrjf\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.418527 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-utilities\") pod \"redhat-marketplace-pxrjf\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.418585 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc5xm\" (UniqueName: \"kubernetes.io/projected/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-kube-api-access-vc5xm\") pod \"redhat-marketplace-pxrjf\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.464720 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.475643 5014 generic.go:334] "Generic (PLEG): container finished" podID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerID="c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f" exitCode=0 Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.479852 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cn8nz" event={"ID":"a16883f7-65de-4e01-a7e3-adb349c31ea0","Type":"ContainerDied","Data":"c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f"} Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.479915 
5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cn8nz" event={"ID":"a16883f7-65de-4e01-a7e3-adb349c31ea0","Type":"ContainerStarted","Data":"bed48e9a59ebb1215b942e6380ec450867450ba66c533a604c24185c477a95f1"} Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.488385 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.491914 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.520038 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc5xm\" (UniqueName: \"kubernetes.io/projected/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-kube-api-access-vc5xm\") pod \"redhat-marketplace-pxrjf\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.520254 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-catalog-content\") pod \"redhat-marketplace-pxrjf\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.520308 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-utilities\") pod \"redhat-marketplace-pxrjf\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.520757 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-utilities\") pod \"redhat-marketplace-pxrjf\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.523388 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-catalog-content\") pod \"redhat-marketplace-pxrjf\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.553683 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc5xm\" (UniqueName: \"kubernetes.io/projected/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-kube-api-access-vc5xm\") pod \"redhat-marketplace-pxrjf\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.702108 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dgnc6"] Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.703851 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.723285 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgnc6"] Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.766014 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.785083 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2s2hb"] Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.832455 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-catalog-content\") pod \"redhat-marketplace-dgnc6\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") " pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.832527 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xv99j\" (UniqueName: \"kubernetes.io/projected/e7cf4621-ac21-4364-a447-ebea3c11082e-kube-api-access-xv99j\") pod \"redhat-marketplace-dgnc6\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") " pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.832591 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-utilities\") pod \"redhat-marketplace-dgnc6\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") " pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.938509 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-utilities\") pod \"redhat-marketplace-dgnc6\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") " pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.939050 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-catalog-content\") pod \"redhat-marketplace-dgnc6\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") " pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.939097 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xv99j\" (UniqueName: \"kubernetes.io/projected/e7cf4621-ac21-4364-a447-ebea3c11082e-kube-api-access-xv99j\") pod \"redhat-marketplace-dgnc6\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") " pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.940756 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-utilities\") pod \"redhat-marketplace-dgnc6\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") " pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.941096 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-catalog-content\") pod \"redhat-marketplace-dgnc6\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") " pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.962975 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.977291 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.989133 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 10:50:17 crc kubenswrapper[5014]: I1205 10:50:17.989492 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:17.992892 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.028176 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bjhj7" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.029758 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xv99j\" (UniqueName: \"kubernetes.io/projected/e7cf4621-ac21-4364-a447-ebea3c11082e-kube-api-access-xv99j\") pod \"redhat-marketplace-dgnc6\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") " pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.040193 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.040256 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.108862 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.141503 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.141564 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.141692 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.163892 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.212916 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pxrjf"]
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.252723 5014 patch_prober.go:28] interesting pod/router-default-5444994796-5qwb7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 10:50:18 crc kubenswrapper[5014]: [-]has-synced failed: reason withheld
Dec 05 10:50:18 crc kubenswrapper[5014]: [+]process-running ok
Dec 05 10:50:18 crc kubenswrapper[5014]: healthz check failed
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.252798 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5qwb7" podUID="8088b3b8-62ae-4e37-8d98-8072fe5ac30b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.299890 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.300905 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.311844 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.330293 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dgnc6"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.464292 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.492940 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3e11bf39-431d-4a57-848b-ba9de73e67eb","Type":"ContainerStarted","Data":"c5cad4806039779fcc6e470febdafd9cd0cf470a9f3f094f471ed89f6d870e88"}
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.494920 5014 generic.go:334] "Generic (PLEG): container finished" podID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerID="027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6" exitCode=0
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.494962 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pxrjf" event={"ID":"f35c6bb2-9a29-41b5-bfeb-39e8848b095f","Type":"ContainerDied","Data":"027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6"}
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.494978 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pxrjf" event={"ID":"f35c6bb2-9a29-41b5-bfeb-39e8848b095f","Type":"ContainerStarted","Data":"5402c35bed5e183d47b55e55fcb918efbf045181bc779dcd87771f4ac280f9fe"}
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.503155 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" event={"ID":"425046d9-b7c9-4b15-be69-2b2ab11aad8f","Type":"ContainerStarted","Data":"5fedf437f267dad37f0a2bf0c2a89ea445a55e778158d9170ab9d43374793d41"}
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.503235 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" event={"ID":"425046d9-b7c9-4b15-be69-2b2ab11aad8f","Type":"ContainerStarted","Data":"8433ee1bc332c0fe977cd50f30b397aa5d24b2a5bc5486615b1f71970bfb6912"}
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.503343 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.505133 5014 generic.go:334] "Generic (PLEG): container finished" podID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerID="628d4a8d7edb275095419a27d2b769793cc794f27d67c12e0b0d2e231239dcc4" exitCode=0
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.505540 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8v5hq" event={"ID":"132d8475-31f2-4d2c-90d2-7d7739cc0fea","Type":"ContainerDied","Data":"628d4a8d7edb275095419a27d2b769793cc794f27d67c12e0b0d2e231239dcc4"}
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.533197 5014 generic.go:334] "Generic (PLEG): container finished" podID="70885ea0-025c-45b1-9999-7a44c28312ba" containerID="3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923" exitCode=0
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.533399 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2pv7s" event={"ID":"70885ea0-025c-45b1-9999-7a44c28312ba","Type":"ContainerDied","Data":"3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923"}
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.533444 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2pv7s" event={"ID":"70885ea0-025c-45b1-9999-7a44c28312ba","Type":"ContainerStarted","Data":"fb40ec340edbd6dadb21cd7074353b9fca1d059a17351f875f9ce2e234c2dcfa"}
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.561977 5014 generic.go:334] "Generic (PLEG): container finished" podID="32523fc8-6af2-488b-984c-e38d294adadd" containerID="7a203623ed16b96d3a5f93c922386137e27c576d97ca51b0cc045d8a21e09d9f" exitCode=0
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.562053 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xr4bg" event={"ID":"32523fc8-6af2-488b-984c-e38d294adadd","Type":"ContainerDied","Data":"7a203623ed16b96d3a5f93c922386137e27c576d97ca51b0cc045d8a21e09d9f"}
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.581117 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-g5c8p"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.588076 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" podStartSLOduration=127.588030137 podStartE2EDuration="2m7.588030137s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:18.548977309 +0000 UTC m=+145.497095043" watchObservedRunningTime="2025-12-05 10:50:18.588030137 +0000 UTC m=+145.536147851"
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.605987 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgnc6"]
Dec 05 10:50:18 crc kubenswrapper[5014]: W1205 10:50:18.658392 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7cf4621_ac21_4364_a447_ebea3c11082e.slice/crio-56a71c3e6ecf424c730b3036c70109efcaf0622c586afc19b03a290b6489b75c WatchSource:0}: Error finding container 56a71c3e6ecf424c730b3036c70109efcaf0622c586afc19b03a290b6489b75c: Status 404 returned error can't find the container with id 56a71c3e6ecf424c730b3036c70109efcaf0622c586afc19b03a290b6489b75c
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.718407 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d46f6"]
Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.722492 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d46f6"
Need to start a new one" pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.731227 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.741797 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d46f6"] Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.766313 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-utilities\") pod \"redhat-operators-d46f6\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.766402 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-catalog-content\") pod \"redhat-operators-d46f6\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.766447 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghsq7\" (UniqueName: \"kubernetes.io/projected/716f3dfe-fa59-450b-ba26-31a7a26763a3-kube-api-access-ghsq7\") pod \"redhat-operators-d46f6\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.777649 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.778514 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.785016 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-778n8" podStartSLOduration=12.784994536 podStartE2EDuration="12.784994536s" podCreationTimestamp="2025-12-05 10:50:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:18.782072512 +0000 UTC m=+145.730190246" watchObservedRunningTime="2025-12-05 10:50:18.784994536 +0000 UTC m=+145.733112240" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.797969 5014 patch_prober.go:28] interesting pod/console-f9d7485db-bzvs8 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.798064 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-bzvs8" podUID="1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" containerName="console" probeResult="failure" output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.828319 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.869659 5014 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-utilities\") pod \"redhat-operators-d46f6\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.869728 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-catalog-content\") pod \"redhat-operators-d46f6\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.869828 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghsq7\" (UniqueName: \"kubernetes.io/projected/716f3dfe-fa59-450b-ba26-31a7a26763a3-kube-api-access-ghsq7\") pod \"redhat-operators-d46f6\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.870301 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-utilities\") pod \"redhat-operators-d46f6\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.871363 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-catalog-content\") pod \"redhat-operators-d46f6\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:18 crc kubenswrapper[5014]: I1205 10:50:18.902516 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghsq7\" (UniqueName: \"kubernetes.io/projected/716f3dfe-fa59-450b-ba26-31a7a26763a3-kube-api-access-ghsq7\") pod \"redhat-operators-d46f6\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.053768 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.110873 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hds86"] Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.112322 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.134080 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hds86"] Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.180259 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-utilities\") pod \"redhat-operators-hds86\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.180338 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkpkw\" (UniqueName: \"kubernetes.io/projected/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-kube-api-access-pkpkw\") pod \"redhat-operators-hds86\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.180379 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-catalog-content\") pod \"redhat-operators-hds86\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.186328 5014 patch_prober.go:28] interesting pod/downloads-7954f5f757-vf2bj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.186400 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vf2bj" podUID="506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.186789 5014 patch_prober.go:28] interesting pod/downloads-7954f5f757-vf2bj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.186812 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-vf2bj" podUID="506d0cfb-4a0f-49b6-ac40-9dc4f7c8816f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.251177 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.257695 5014 patch_prober.go:28] interesting pod/router-default-5444994796-5qwb7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 10:50:19 crc kubenswrapper[5014]: [+]has-synced ok Dec 05 10:50:19 crc kubenswrapper[5014]: [+]process-running ok Dec 05 10:50:19 crc kubenswrapper[5014]: healthz check 
failed Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.257742 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5qwb7" podUID="8088b3b8-62ae-4e37-8d98-8072fe5ac30b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.281655 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-utilities\") pod \"redhat-operators-hds86\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.282196 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-utilities\") pod \"redhat-operators-hds86\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.282328 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkpkw\" (UniqueName: \"kubernetes.io/projected/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-kube-api-access-pkpkw\") pod \"redhat-operators-hds86\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.282386 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-catalog-content\") pod \"redhat-operators-hds86\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.282797 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-catalog-content\") pod \"redhat-operators-hds86\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.307891 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkpkw\" (UniqueName: \"kubernetes.io/projected/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-kube-api-access-pkpkw\") pod \"redhat-operators-hds86\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.383930 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.384085 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:50:19 crc 
kubenswrapper[5014]: I1205 10:50:19.384139 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.387090 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d46f6"] Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.389965 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.390795 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.451210 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.485967 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.492821 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.515281 5014 util.go:30] "No sandbox for pod can be found. 
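Each "No sandbox for pod can be found. Need to start a new one" line (util.go:30) is the sandbox decision on a pod's first sync: no sandbox exists yet, so one must be created before any containers can start. The variant at util.go:48 further down ("No ready sandbox for pod can be found.") covers a sandbox that exists but is no longer ready. Roughly, under invented types (the real decision in the kubelet's kuberuntime package also compares network and sandbox config, omitted here):

package main

import "fmt"

// sandbox is a stand-in for the runtime's pod sandbox status.
type sandbox struct {
	id    string
	ready bool
}

// needsNewSandbox mirrors the two log variants: util.go:30 when no sandbox
// exists at all, util.go:48 when one exists but is not ready anymore.
func needsNewSandbox(sandboxes []sandbox) (bool, string) {
	if len(sandboxes) == 0 {
		return true, "No sandbox for pod can be found. Need to start a new one"
	}
	if !sandboxes[0].ready { // newest sandbox first
		return true, "No ready sandbox for pod can be found. Need to start a new one"
	}
	return false, ""
}

func main() {
	fmt.Println(needsNewSandbox(nil))                                        // first sync
	fmt.Println(needsNewSandbox([]sandbox{{id: "56a71c3e6ecf", ready: false}})) // stale sandbox
	fmt.Println(needsNewSandbox([]sandbox{{id: "c06edeb286e0", ready: true}}))  // nothing to do
}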
Need to start a new one" pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.577481 5014 generic.go:334] "Generic (PLEG): container finished" podID="9aa2370d-27db-4547-95f3-f09274275737" containerID="d5fe98dca49cf644fd6c63dd35510af43dbf99c23a5ce6d8cb103e3566f01b18" exitCode=0 Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.577548 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" event={"ID":"9aa2370d-27db-4547-95f3-f09274275737","Type":"ContainerDied","Data":"d5fe98dca49cf644fd6c63dd35510af43dbf99c23a5ce6d8cb103e3566f01b18"} Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.581686 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d","Type":"ContainerStarted","Data":"95f501f7d90a8b1ad81b03a02228406b6ec6406ae626668b29cabd1b24fa2166"} Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.594471 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3e11bf39-431d-4a57-848b-ba9de73e67eb","Type":"ContainerStarted","Data":"9cd662cbff256f50bce5289aa3c4e15e4de175d135b0a4ff97e443daf8fcee6f"} Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.599953 5014 generic.go:334] "Generic (PLEG): container finished" podID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerID="a93759e111da088bfd47dbe673e2961eee6948c994ff9c722b91c69634cf411c" exitCode=0 Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.601199 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgnc6" event={"ID":"e7cf4621-ac21-4364-a447-ebea3c11082e","Type":"ContainerDied","Data":"a93759e111da088bfd47dbe673e2961eee6948c994ff9c722b91c69634cf411c"} Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.601312 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgnc6" event={"ID":"e7cf4621-ac21-4364-a447-ebea3c11082e","Type":"ContainerStarted","Data":"56a71c3e6ecf424c730b3036c70109efcaf0622c586afc19b03a290b6489b75c"} Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.614872 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.614837254 podStartE2EDuration="2.614837254s" podCreationTimestamp="2025-12-05 10:50:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:50:19.611793025 +0000 UTC m=+146.559910749" watchObservedRunningTime="2025-12-05 10:50:19.614837254 +0000 UTC m=+146.562954968" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.623595 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.623686 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.624185 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.632588 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.671622 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.733952 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 10:50:19 crc kubenswrapper[5014]: I1205 10:50:19.752174 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 10:50:19 crc kubenswrapper[5014]: W1205 10:50:19.774416 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod716f3dfe_fa59_450b_ba26_31a7a26763a3.slice/crio-4aec3ebb244362c0cafab193b6e9aef3c43297eae7a5ce71790d73bca6b30616 WatchSource:0}: Error finding container 4aec3ebb244362c0cafab193b6e9aef3c43297eae7a5ce71790d73bca6b30616: Status 404 returned error can't find the container with id 4aec3ebb244362c0cafab193b6e9aef3c43297eae7a5ce71790d73bca6b30616 Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.257261 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.264047 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-5qwb7" Dec 05 10:50:20 crc kubenswrapper[5014]: W1205 10:50:20.374640 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-35eaa0ff7bbe8d01771345140c8180273ff4d3538c763321dd1ad26f7bb47843 WatchSource:0}: Error finding container 35eaa0ff7bbe8d01771345140c8180273ff4d3538c763321dd1ad26f7bb47843: Status 404 returned error can't find the container with id 35eaa0ff7bbe8d01771345140c8180273ff4d3538c763321dd1ad26f7bb47843 Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.402058 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hds86"] Dec 05 10:50:20 crc kubenswrapper[5014]: W1205 10:50:20.582884 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-f52148283bf590483fb3f551fed0398b771cf8f9224a83d48cd5a5c41b88a53b WatchSource:0}: Error finding container f52148283bf590483fb3f551fed0398b771cf8f9224a83d48cd5a5c41b88a53b: Status 404 returned error can't find the container with id f52148283bf590483fb3f551fed0398b771cf8f9224a83d48cd5a5c41b88a53b Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.658144 5014 generic.go:334] "Generic (PLEG): container finished" podID="3e11bf39-431d-4a57-848b-ba9de73e67eb" containerID="9cd662cbff256f50bce5289aa3c4e15e4de175d135b0a4ff97e443daf8fcee6f" exitCode=0 Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.658399 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"3e11bf39-431d-4a57-848b-ba9de73e67eb","Type":"ContainerDied","Data":"9cd662cbff256f50bce5289aa3c4e15e4de175d135b0a4ff97e443daf8fcee6f"} Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.681154 5014 generic.go:334] "Generic (PLEG): container finished" podID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerID="0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f" exitCode=0 Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.681243 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d46f6" event={"ID":"716f3dfe-fa59-450b-ba26-31a7a26763a3","Type":"ContainerDied","Data":"0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f"} Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.681295 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d46f6" event={"ID":"716f3dfe-fa59-450b-ba26-31a7a26763a3","Type":"ContainerStarted","Data":"4aec3ebb244362c0cafab193b6e9aef3c43297eae7a5ce71790d73bca6b30616"} Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.699057 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hds86" event={"ID":"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f","Type":"ContainerStarted","Data":"1f82580f7242151ece2a883d4c80b68bde4440b3a8135c9e21bf671c1409f177"} Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.743883 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"0362a720fe07542642c0d43c2243c9c2a0e1d7f50595465a6847a9f4478b5860"} Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.759648 5014 generic.go:334] "Generic (PLEG): container finished" podID="c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d" containerID="c7456b4a180ca8cfa77d1b8775307be4f521a0299ede103d84756f9040ba5df9" exitCode=0 Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.759771 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d","Type":"ContainerDied","Data":"c7456b4a180ca8cfa77d1b8775307be4f521a0299ede103d84756f9040ba5df9"} Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.769966 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f52148283bf590483fb3f551fed0398b771cf8f9224a83d48cd5a5c41b88a53b"} Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.787199 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"35eaa0ff7bbe8d01771345140c8180273ff4d3538c763321dd1ad26f7bb47843"} Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.789213 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:50:20 crc kubenswrapper[5014]: I1205 10:50:20.807923 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lf5z4" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.255149 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.448076 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aa2370d-27db-4547-95f3-f09274275737-config-volume\") pod \"9aa2370d-27db-4547-95f3-f09274275737\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.448153 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxk5p\" (UniqueName: \"kubernetes.io/projected/9aa2370d-27db-4547-95f3-f09274275737-kube-api-access-jxk5p\") pod \"9aa2370d-27db-4547-95f3-f09274275737\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.448237 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aa2370d-27db-4547-95f3-f09274275737-secret-volume\") pod \"9aa2370d-27db-4547-95f3-f09274275737\" (UID: \"9aa2370d-27db-4547-95f3-f09274275737\") " Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.450822 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9aa2370d-27db-4547-95f3-f09274275737-config-volume" (OuterVolumeSpecName: "config-volume") pod "9aa2370d-27db-4547-95f3-f09274275737" (UID: "9aa2370d-27db-4547-95f3-f09274275737"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.453127 5014 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aa2370d-27db-4547-95f3-f09274275737-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.457786 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aa2370d-27db-4547-95f3-f09274275737-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9aa2370d-27db-4547-95f3-f09274275737" (UID: "9aa2370d-27db-4547-95f3-f09274275737"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.458090 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aa2370d-27db-4547-95f3-f09274275737-kube-api-access-jxk5p" (OuterVolumeSpecName: "kube-api-access-jxk5p") pod "9aa2370d-27db-4547-95f3-f09274275737" (UID: "9aa2370d-27db-4547-95f3-f09274275737"). InnerVolumeSpecName "kube-api-access-jxk5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.559917 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxk5p\" (UniqueName: \"kubernetes.io/projected/9aa2370d-27db-4547-95f3-f09274275737-kube-api-access-jxk5p\") on node \"crc\" DevicePath \"\"" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.560452 5014 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aa2370d-27db-4547-95f3-f09274275737-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.851132 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.853678 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2" event={"ID":"9aa2370d-27db-4547-95f3-f09274275737","Type":"ContainerDied","Data":"7329e5fd87b28c9ec5e2d268ab9688faa230bb5f7384b1e8474ada9fa132a908"} Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.853748 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7329e5fd87b28c9ec5e2d268ab9688faa230bb5f7384b1e8474ada9fa132a908" Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.917243 5014 generic.go:334] "Generic (PLEG): container finished" podID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerID="89e271352c976f2ef131789d2ac846a59d18387b4cae87688e1bcc56cc3396f0" exitCode=0 Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.919551 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hds86" event={"ID":"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f","Type":"ContainerDied","Data":"89e271352c976f2ef131789d2ac846a59d18387b4cae87688e1bcc56cc3396f0"} Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.939517 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"30aa591e5fe16c29c18dab10fa62a0443302c875afef1df5395f44720e3b1f42"} Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.963134 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ee8bce674aa26fc0edbec0a077a3e01066564f4929a8eecc2fcfb3c10c41a121"} Dec 05 10:50:21 crc kubenswrapper[5014]: I1205 10:50:21.993866 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"9f5076d1b0f3f5ebb1a9b15be90adf6e46ba1f3b6d03a74d0b5f66e1a7b30768"} Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.535748 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.545675 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.605497 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e11bf39-431d-4a57-848b-ba9de73e67eb-kube-api-access\") pod \"3e11bf39-431d-4a57-848b-ba9de73e67eb\" (UID: \"3e11bf39-431d-4a57-848b-ba9de73e67eb\") " Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.605567 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kubelet-dir\") pod \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\" (UID: \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\") " Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.605737 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3e11bf39-431d-4a57-848b-ba9de73e67eb-kubelet-dir\") pod \"3e11bf39-431d-4a57-848b-ba9de73e67eb\" (UID: \"3e11bf39-431d-4a57-848b-ba9de73e67eb\") " Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.605777 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kube-api-access\") pod \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\" (UID: \"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d\") " Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.607658 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d" (UID: "c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.608144 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e11bf39-431d-4a57-848b-ba9de73e67eb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "3e11bf39-431d-4a57-848b-ba9de73e67eb" (UID: "3e11bf39-431d-4a57-848b-ba9de73e67eb"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.614186 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d" (UID: "c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.637805 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e11bf39-431d-4a57-848b-ba9de73e67eb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "3e11bf39-431d-4a57-848b-ba9de73e67eb" (UID: "3e11bf39-431d-4a57-848b-ba9de73e67eb"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.707825 5014 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3e11bf39-431d-4a57-848b-ba9de73e67eb-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.707876 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.707893 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e11bf39-431d-4a57-848b-ba9de73e67eb-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 10:50:22 crc kubenswrapper[5014]: I1205 10:50:22.707904 5014 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:50:23 crc kubenswrapper[5014]: I1205 10:50:23.011678 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 10:50:23 crc kubenswrapper[5014]: I1205 10:50:23.012385 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d","Type":"ContainerDied","Data":"95f501f7d90a8b1ad81b03a02228406b6ec6406ae626668b29cabd1b24fa2166"} Dec 05 10:50:23 crc kubenswrapper[5014]: I1205 10:50:23.012439 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95f501f7d90a8b1ad81b03a02228406b6ec6406ae626668b29cabd1b24fa2166" Dec 05 10:50:23 crc kubenswrapper[5014]: I1205 10:50:23.038336 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3e11bf39-431d-4a57-848b-ba9de73e67eb","Type":"ContainerDied","Data":"c5cad4806039779fcc6e470febdafd9cd0cf470a9f3f094f471ed89f6d870e88"} Dec 05 10:50:23 crc kubenswrapper[5014]: I1205 10:50:23.038415 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5cad4806039779fcc6e470febdafd9cd0cf470a9f3f094f471ed89f6d870e88" Dec 05 10:50:23 crc kubenswrapper[5014]: I1205 10:50:23.038546 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 10:50:24 crc kubenswrapper[5014]: I1205 10:50:24.674376 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-2gdp8" Dec 05 10:50:28 crc kubenswrapper[5014]: I1205 10:50:28.783963 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:28 crc kubenswrapper[5014]: I1205 10:50:28.790353 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 10:50:29 crc kubenswrapper[5014]: I1205 10:50:29.189349 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-vf2bj" Dec 05 10:50:32 crc kubenswrapper[5014]: I1205 10:50:32.937062 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 10:50:32 crc kubenswrapper[5014]: I1205 10:50:32.938180 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 10:50:33 crc kubenswrapper[5014]: I1205 10:50:33.830989 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:50:33 crc kubenswrapper[5014]: I1205 10:50:33.838770 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97abc013-62da-459c-b7ec-2a78304dcc56-metrics-certs\") pod \"network-metrics-daemon-vrt2x\" (UID: \"97abc013-62da-459c-b7ec-2a78304dcc56\") " pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:50:33 crc kubenswrapper[5014]: I1205 10:50:33.860304 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vrt2x" Dec 05 10:50:37 crc kubenswrapper[5014]: I1205 10:50:37.307609 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:50:48 crc kubenswrapper[5014]: I1205 10:50:48.962413 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mshdw" Dec 05 10:50:55 crc kubenswrapper[5014]: E1205 10:50:55.262060 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 10:50:55 crc kubenswrapper[5014]: E1205 10:50:55.262868 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b584g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-cn8nz_openshift-marketplace(a16883f7-65de-4e01-a7e3-adb349c31ea0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 10:50:55 crc kubenswrapper[5014]: E1205 10:50:55.264129 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-cn8nz" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" Dec 05 10:50:55 crc kubenswrapper[5014]: I1205 10:50:55.945711 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 10:50:55 crc kubenswrapper[5014]: E1205 10:50:55.946510 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aa2370d-27db-4547-95f3-f09274275737" containerName="collect-profiles" Dec 05 10:50:55 crc kubenswrapper[5014]: I1205 
10:50:55.946566 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aa2370d-27db-4547-95f3-f09274275737" containerName="collect-profiles" Dec 05 10:50:55 crc kubenswrapper[5014]: E1205 10:50:55.946587 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e11bf39-431d-4a57-848b-ba9de73e67eb" containerName="pruner" Dec 05 10:50:55 crc kubenswrapper[5014]: I1205 10:50:55.946596 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e11bf39-431d-4a57-848b-ba9de73e67eb" containerName="pruner" Dec 05 10:50:55 crc kubenswrapper[5014]: E1205 10:50:55.946615 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d" containerName="pruner" Dec 05 10:50:55 crc kubenswrapper[5014]: I1205 10:50:55.946644 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d" containerName="pruner" Dec 05 10:50:55 crc kubenswrapper[5014]: I1205 10:50:55.946835 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aa2370d-27db-4547-95f3-f09274275737" containerName="collect-profiles" Dec 05 10:50:55 crc kubenswrapper[5014]: I1205 10:50:55.946849 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="c03ffb84-b367-4515-a8cf-8aa8a2e9ac7d" containerName="pruner" Dec 05 10:50:55 crc kubenswrapper[5014]: I1205 10:50:55.946862 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e11bf39-431d-4a57-848b-ba9de73e67eb" containerName="pruner" Dec 05 10:50:55 crc kubenswrapper[5014]: I1205 10:50:55.947563 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.001440 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.002346 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.005680 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.099391 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7eb680a2-ca73-4260-8f5f-897296700e43-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7eb680a2-ca73-4260-8f5f-897296700e43\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.099458 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7eb680a2-ca73-4260-8f5f-897296700e43-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7eb680a2-ca73-4260-8f5f-897296700e43\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.200130 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7eb680a2-ca73-4260-8f5f-897296700e43-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7eb680a2-ca73-4260-8f5f-897296700e43\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.200195 5014 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7eb680a2-ca73-4260-8f5f-897296700e43-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7eb680a2-ca73-4260-8f5f-897296700e43\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.200250 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7eb680a2-ca73-4260-8f5f-897296700e43-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7eb680a2-ca73-4260-8f5f-897296700e43\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.218595 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7eb680a2-ca73-4260-8f5f-897296700e43-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7eb680a2-ca73-4260-8f5f-897296700e43\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:50:56 crc kubenswrapper[5014]: I1205 10:50:56.341942 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:50:56 crc kubenswrapper[5014]: E1205 10:50:56.911971 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-cn8nz" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" Dec 05 10:50:56 crc kubenswrapper[5014]: E1205 10:50:56.980188 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 10:50:56 crc kubenswrapper[5014]: E1205 10:50:56.980342 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gslwm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-xr4bg_openshift-marketplace(32523fc8-6af2-488b-984c-e38d294adadd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 10:50:56 crc kubenswrapper[5014]: E1205 10:50:56.982208 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-xr4bg" podUID="32523fc8-6af2-488b-984c-e38d294adadd" Dec 05 10:50:58 crc kubenswrapper[5014]: E1205 10:50:58.250191 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 10:50:58 crc kubenswrapper[5014]: E1205 10:50:58.250559 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xv99j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-dgnc6_openshift-marketplace(e7cf4621-ac21-4364-a447-ebea3c11082e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 10:50:58 crc kubenswrapper[5014]: E1205 10:50:58.253385 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-dgnc6" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" Dec 05 10:50:59 crc kubenswrapper[5014]: I1205 10:50:59.455925 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.142249 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.151504 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.169263 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.260063 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8576908a-678d-4f43-a530-4686764fd2cf-kube-api-access\") pod \"installer-9-crc\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.260190 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.260319 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-var-lock\") pod \"installer-9-crc\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.361913 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.361984 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-var-lock\") pod \"installer-9-crc\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.362023 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.362029 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8576908a-678d-4f43-a530-4686764fd2cf-kube-api-access\") pod \"installer-9-crc\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.362350 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-var-lock\") pod \"installer-9-crc\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.383128 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8576908a-678d-4f43-a530-4686764fd2cf-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"8576908a-678d-4f43-a530-4686764fd2cf\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:00 crc kubenswrapper[5014]: I1205 10:51:00.485490 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:01 crc kubenswrapper[5014]: E1205 10:51:01.521948 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-dgnc6" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" Dec 05 10:51:01 crc kubenswrapper[5014]: E1205 10:51:01.522466 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-xr4bg" podUID="32523fc8-6af2-488b-984c-e38d294adadd" Dec 05 10:51:01 crc kubenswrapper[5014]: E1205 10:51:01.598058 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 10:51:01 crc kubenswrapper[5014]: E1205 10:51:01.598330 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pkpkw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-hds86_openshift-marketplace(8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 10:51:01 crc kubenswrapper[5014]: E1205 10:51:01.599520 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying 
config: context canceled\"" pod="openshift-marketplace/redhat-operators-hds86" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" Dec 05 10:51:02 crc kubenswrapper[5014]: I1205 10:51:02.936608 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 10:51:02 crc kubenswrapper[5014]: I1205 10:51:02.937153 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 10:51:02 crc kubenswrapper[5014]: E1205 10:51:02.973902 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-hds86" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.062984 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.063166 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h4c9w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-8v5hq_openshift-marketplace(132d8475-31f2-4d2c-90d2-7d7739cc0fea): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.067334 5014 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-8v5hq" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.093272 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.093480 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vrrs9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-2pv7s_openshift-marketplace(70885ea0-025c-45b1-9999-7a44c28312ba): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.094163 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.094339 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ghsq7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-d46f6_openshift-marketplace(716f3dfe-fa59-450b-ba26-31a7a26763a3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.095293 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-2pv7s" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.096182 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-d46f6" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.127901 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.128091 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vc5xm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-pxrjf_openshift-marketplace(f35c6bb2-9a29-41b5-bfeb-39e8848b095f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.129451 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-pxrjf" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" Dec 05 10:51:03 crc kubenswrapper[5014]: I1205 10:51:03.237245 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-vrt2x"] Dec 05 10:51:03 crc kubenswrapper[5014]: I1205 10:51:03.291437 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 10:51:03 crc kubenswrapper[5014]: W1205 10:51:03.298218 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod8576908a_678d_4f43_a530_4686764fd2cf.slice/crio-f2009442bdb747ee829961e6878372727b13b59e0db964027ae60df6f15a1070 WatchSource:0}: Error finding container f2009442bdb747ee829961e6878372727b13b59e0db964027ae60df6f15a1070: Status 404 returned error can't find the container with id f2009442bdb747ee829961e6878372727b13b59e0db964027ae60df6f15a1070 Dec 05 10:51:03 crc kubenswrapper[5014]: I1205 10:51:03.338331 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 10:51:03 crc kubenswrapper[5014]: I1205 10:51:03.367882 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" event={"ID":"97abc013-62da-459c-b7ec-2a78304dcc56","Type":"ContainerStarted","Data":"5c4ebf0f7690e1d0755344162e6f9ce329ff1b6426b63f59d3643bc6c1e73d92"} Dec 05 10:51:03 crc kubenswrapper[5014]: I1205 10:51:03.369242 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" 
event={"ID":"8576908a-678d-4f43-a530-4686764fd2cf","Type":"ContainerStarted","Data":"f2009442bdb747ee829961e6878372727b13b59e0db964027ae60df6f15a1070"} Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.370051 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-2pv7s" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.370908 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-pxrjf" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.371817 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-8v5hq" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" Dec 05 10:51:03 crc kubenswrapper[5014]: E1205 10:51:03.375971 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-d46f6" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" Dec 05 10:51:04 crc kubenswrapper[5014]: I1205 10:51:04.377807 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"7eb680a2-ca73-4260-8f5f-897296700e43","Type":"ContainerStarted","Data":"f01c5f2da4adbcced54ce96dc227042377faaa3975e5a41aa7aff30cdb2b3f79"} Dec 05 10:51:04 crc kubenswrapper[5014]: I1205 10:51:04.378689 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"7eb680a2-ca73-4260-8f5f-897296700e43","Type":"ContainerStarted","Data":"6706903e4fd5efa79b39c15b76c19872d51d126348dc87f1136840798d7a4b33"} Dec 05 10:51:04 crc kubenswrapper[5014]: I1205 10:51:04.380293 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" event={"ID":"97abc013-62da-459c-b7ec-2a78304dcc56","Type":"ContainerStarted","Data":"7af87faec1fdd459e75e962921cef488e0da4b6659650d8846906718cd68766b"} Dec 05 10:51:04 crc kubenswrapper[5014]: I1205 10:51:04.380373 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vrt2x" event={"ID":"97abc013-62da-459c-b7ec-2a78304dcc56","Type":"ContainerStarted","Data":"1b6aafd84ec6b0509d3cbe4247670a8aac8395067b840fd0ab73425959867bf9"} Dec 05 10:51:04 crc kubenswrapper[5014]: I1205 10:51:04.382599 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8576908a-678d-4f43-a530-4686764fd2cf","Type":"ContainerStarted","Data":"57b27251546cbcd68f7126efcb23a9461e4bc19c9c867b447f7dc9bf12ffd343"} Dec 05 10:51:04 crc kubenswrapper[5014]: I1205 10:51:04.395726 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=9.395697542 
podStartE2EDuration="9.395697542s" podCreationTimestamp="2025-12-05 10:50:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:51:04.393812218 +0000 UTC m=+191.341929942" watchObservedRunningTime="2025-12-05 10:51:04.395697542 +0000 UTC m=+191.343815266" Dec 05 10:51:04 crc kubenswrapper[5014]: I1205 10:51:04.419588 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.419553022 podStartE2EDuration="4.419553022s" podCreationTimestamp="2025-12-05 10:51:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:51:04.411581682 +0000 UTC m=+191.359699406" watchObservedRunningTime="2025-12-05 10:51:04.419553022 +0000 UTC m=+191.367670726" Dec 05 10:51:04 crc kubenswrapper[5014]: I1205 10:51:04.449052 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-vrt2x" podStartSLOduration=173.449022653 podStartE2EDuration="2m53.449022653s" podCreationTimestamp="2025-12-05 10:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:51:04.448569259 +0000 UTC m=+191.396686993" watchObservedRunningTime="2025-12-05 10:51:04.449022653 +0000 UTC m=+191.397140367" Dec 05 10:51:05 crc kubenswrapper[5014]: I1205 10:51:05.388806 5014 generic.go:334] "Generic (PLEG): container finished" podID="7eb680a2-ca73-4260-8f5f-897296700e43" containerID="f01c5f2da4adbcced54ce96dc227042377faaa3975e5a41aa7aff30cdb2b3f79" exitCode=0 Dec 05 10:51:05 crc kubenswrapper[5014]: I1205 10:51:05.389001 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"7eb680a2-ca73-4260-8f5f-897296700e43","Type":"ContainerDied","Data":"f01c5f2da4adbcced54ce96dc227042377faaa3975e5a41aa7aff30cdb2b3f79"} Dec 05 10:51:06 crc kubenswrapper[5014]: I1205 10:51:06.609701 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:51:06 crc kubenswrapper[5014]: I1205 10:51:06.752921 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7eb680a2-ca73-4260-8f5f-897296700e43-kubelet-dir\") pod \"7eb680a2-ca73-4260-8f5f-897296700e43\" (UID: \"7eb680a2-ca73-4260-8f5f-897296700e43\") " Dec 05 10:51:06 crc kubenswrapper[5014]: I1205 10:51:06.752979 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7eb680a2-ca73-4260-8f5f-897296700e43-kube-api-access\") pod \"7eb680a2-ca73-4260-8f5f-897296700e43\" (UID: \"7eb680a2-ca73-4260-8f5f-897296700e43\") " Dec 05 10:51:06 crc kubenswrapper[5014]: I1205 10:51:06.753028 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7eb680a2-ca73-4260-8f5f-897296700e43-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7eb680a2-ca73-4260-8f5f-897296700e43" (UID: "7eb680a2-ca73-4260-8f5f-897296700e43"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:51:06 crc kubenswrapper[5014]: I1205 10:51:06.753204 5014 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7eb680a2-ca73-4260-8f5f-897296700e43-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:06 crc kubenswrapper[5014]: I1205 10:51:06.758780 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eb680a2-ca73-4260-8f5f-897296700e43-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7eb680a2-ca73-4260-8f5f-897296700e43" (UID: "7eb680a2-ca73-4260-8f5f-897296700e43"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:51:06 crc kubenswrapper[5014]: I1205 10:51:06.854496 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7eb680a2-ca73-4260-8f5f-897296700e43-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:07 crc kubenswrapper[5014]: I1205 10:51:07.399834 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"7eb680a2-ca73-4260-8f5f-897296700e43","Type":"ContainerDied","Data":"6706903e4fd5efa79b39c15b76c19872d51d126348dc87f1136840798d7a4b33"} Dec 05 10:51:07 crc kubenswrapper[5014]: I1205 10:51:07.399871 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 10:51:07 crc kubenswrapper[5014]: I1205 10:51:07.399877 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6706903e4fd5efa79b39c15b76c19872d51d126348dc87f1136840798d7a4b33" Dec 05 10:51:11 crc kubenswrapper[5014]: I1205 10:51:11.418611 5014 generic.go:334] "Generic (PLEG): container finished" podID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerID="75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289" exitCode=0 Dec 05 10:51:11 crc kubenswrapper[5014]: I1205 10:51:11.418686 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cn8nz" event={"ID":"a16883f7-65de-4e01-a7e3-adb349c31ea0","Type":"ContainerDied","Data":"75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289"} Dec 05 10:51:12 crc kubenswrapper[5014]: I1205 10:51:12.428663 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cn8nz" event={"ID":"a16883f7-65de-4e01-a7e3-adb349c31ea0","Type":"ContainerStarted","Data":"eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142"} Dec 05 10:51:12 crc kubenswrapper[5014]: I1205 10:51:12.482900 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cn8nz" podStartSLOduration=2.9446036639999997 podStartE2EDuration="57.482884515s" podCreationTimestamp="2025-12-05 10:50:15 +0000 UTC" firstStartedPulling="2025-12-05 10:50:17.48783695 +0000 UTC m=+144.435954654" lastFinishedPulling="2025-12-05 10:51:12.026117801 +0000 UTC m=+198.974235505" observedRunningTime="2025-12-05 10:51:12.478054094 +0000 UTC m=+199.426171818" watchObservedRunningTime="2025-12-05 10:51:12.482884515 +0000 UTC m=+199.431002219" Dec 05 10:51:14 crc kubenswrapper[5014]: I1205 10:51:14.445296 5014 generic.go:334] "Generic (PLEG): container finished" podID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerID="fe0da8cafc0e033377dd2b9540d3193fdf6c348b3a5d373940371a92af82bc16" 
exitCode=0 Dec 05 10:51:14 crc kubenswrapper[5014]: I1205 10:51:14.445376 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgnc6" event={"ID":"e7cf4621-ac21-4364-a447-ebea3c11082e","Type":"ContainerDied","Data":"fe0da8cafc0e033377dd2b9540d3193fdf6c348b3a5d373940371a92af82bc16"} Dec 05 10:51:15 crc kubenswrapper[5014]: I1205 10:51:15.451443 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgnc6" event={"ID":"e7cf4621-ac21-4364-a447-ebea3c11082e","Type":"ContainerStarted","Data":"afd2048c2f80c81ccdb7ebd36ee4d7f2f269f0b80b8c2a920cdea0fe5eacd059"} Dec 05 10:51:15 crc kubenswrapper[5014]: I1205 10:51:15.467628 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dgnc6" podStartSLOduration=3.368845451 podStartE2EDuration="58.467612786s" podCreationTimestamp="2025-12-05 10:50:17 +0000 UTC" firstStartedPulling="2025-12-05 10:50:19.761936502 +0000 UTC m=+146.710054206" lastFinishedPulling="2025-12-05 10:51:14.860703837 +0000 UTC m=+201.808821541" observedRunningTime="2025-12-05 10:51:15.466301413 +0000 UTC m=+202.414419117" watchObservedRunningTime="2025-12-05 10:51:15.467612786 +0000 UTC m=+202.415730490" Dec 05 10:51:15 crc kubenswrapper[5014]: I1205 10:51:15.673002 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:51:15 crc kubenswrapper[5014]: I1205 10:51:15.673051 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:51:15 crc kubenswrapper[5014]: I1205 10:51:15.752403 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:51:17 crc kubenswrapper[5014]: I1205 10:51:17.468723 5014 generic.go:334] "Generic (PLEG): container finished" podID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerID="c6987661e91d6480aa47ba98e96b10c79e9f92b2f0a9394a6c355dd3f70a12b6" exitCode=0 Dec 05 10:51:17 crc kubenswrapper[5014]: I1205 10:51:17.468770 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hds86" event={"ID":"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f","Type":"ContainerDied","Data":"c6987661e91d6480aa47ba98e96b10c79e9f92b2f0a9394a6c355dd3f70a12b6"} Dec 05 10:51:17 crc kubenswrapper[5014]: I1205 10:51:17.472397 5014 generic.go:334] "Generic (PLEG): container finished" podID="32523fc8-6af2-488b-984c-e38d294adadd" containerID="cc697b2e72c0aa0ec065a995a0dcffb2fd65604819fa939b17a42eeb4f2a338a" exitCode=0 Dec 05 10:51:17 crc kubenswrapper[5014]: I1205 10:51:17.472462 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xr4bg" event={"ID":"32523fc8-6af2-488b-984c-e38d294adadd","Type":"ContainerDied","Data":"cc697b2e72c0aa0ec065a995a0dcffb2fd65604819fa939b17a42eeb4f2a338a"} Dec 05 10:51:17 crc kubenswrapper[5014]: I1205 10:51:17.474859 5014 generic.go:334] "Generic (PLEG): container finished" podID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerID="005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de" exitCode=0 Dec 05 10:51:17 crc kubenswrapper[5014]: I1205 10:51:17.474880 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d46f6" 
event={"ID":"716f3dfe-fa59-450b-ba26-31a7a26763a3","Type":"ContainerDied","Data":"005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de"} Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.331324 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.331867 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.403470 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.482031 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hds86" event={"ID":"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f","Type":"ContainerStarted","Data":"1ef02da32dbf874f09512964a7bf98a81a61571e8df63c941f0a76bf3abd2e42"} Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.484759 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xr4bg" event={"ID":"32523fc8-6af2-488b-984c-e38d294adadd","Type":"ContainerStarted","Data":"9b427cbe70b6388bbc0cd915363b0b34fcc5cb15943d93110182cc81908010b9"} Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.487450 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8v5hq" event={"ID":"132d8475-31f2-4d2c-90d2-7d7739cc0fea","Type":"ContainerStarted","Data":"0b97f3903c49ea39905781cdaba87dbf0d9dff3f16221773ae62ebcf5e2e2a0d"} Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.489939 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d46f6" event={"ID":"716f3dfe-fa59-450b-ba26-31a7a26763a3","Type":"ContainerStarted","Data":"ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a"} Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.500835 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hds86" podStartSLOduration=3.55204012 podStartE2EDuration="59.500816059s" podCreationTimestamp="2025-12-05 10:50:19 +0000 UTC" firstStartedPulling="2025-12-05 10:50:21.922679369 +0000 UTC m=+148.870797073" lastFinishedPulling="2025-12-05 10:51:17.871455308 +0000 UTC m=+204.819573012" observedRunningTime="2025-12-05 10:51:18.499204558 +0000 UTC m=+205.447322282" watchObservedRunningTime="2025-12-05 10:51:18.500816059 +0000 UTC m=+205.448933763" Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.525676 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d46f6" podStartSLOduration=3.307280319 podStartE2EDuration="1m0.525653s" podCreationTimestamp="2025-12-05 10:50:18 +0000 UTC" firstStartedPulling="2025-12-05 10:50:20.750537395 +0000 UTC m=+147.698655099" lastFinishedPulling="2025-12-05 10:51:17.968910076 +0000 UTC m=+204.917027780" observedRunningTime="2025-12-05 10:51:18.521657179 +0000 UTC m=+205.469774893" watchObservedRunningTime="2025-12-05 10:51:18.525653 +0000 UTC m=+205.473770704" Dec 05 10:51:18 crc kubenswrapper[5014]: I1205 10:51:18.568777 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xr4bg" podStartSLOduration=4.207413165 podStartE2EDuration="1m3.568755368s" podCreationTimestamp="2025-12-05 
10:50:15 +0000 UTC" firstStartedPulling="2025-12-05 10:50:18.597594943 +0000 UTC m=+145.545712647" lastFinishedPulling="2025-12-05 10:51:17.958937146 +0000 UTC m=+204.907054850" observedRunningTime="2025-12-05 10:51:18.567065266 +0000 UTC m=+205.515182980" watchObservedRunningTime="2025-12-05 10:51:18.568755368 +0000 UTC m=+205.516873082" Dec 05 10:51:19 crc kubenswrapper[5014]: I1205 10:51:19.058430 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:51:19 crc kubenswrapper[5014]: I1205 10:51:19.058829 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:51:19 crc kubenswrapper[5014]: I1205 10:51:19.497234 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pxrjf" event={"ID":"f35c6bb2-9a29-41b5-bfeb-39e8848b095f","Type":"ContainerStarted","Data":"b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800"} Dec 05 10:51:19 crc kubenswrapper[5014]: I1205 10:51:19.499792 5014 generic.go:334] "Generic (PLEG): container finished" podID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerID="0b97f3903c49ea39905781cdaba87dbf0d9dff3f16221773ae62ebcf5e2e2a0d" exitCode=0 Dec 05 10:51:19 crc kubenswrapper[5014]: I1205 10:51:19.499863 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8v5hq" event={"ID":"132d8475-31f2-4d2c-90d2-7d7739cc0fea","Type":"ContainerDied","Data":"0b97f3903c49ea39905781cdaba87dbf0d9dff3f16221773ae62ebcf5e2e2a0d"} Dec 05 10:51:19 crc kubenswrapper[5014]: I1205 10:51:19.513972 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:51:19 crc kubenswrapper[5014]: I1205 10:51:19.514031 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:51:20 crc kubenswrapper[5014]: I1205 10:51:20.096498 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-d46f6" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerName="registry-server" probeResult="failure" output=< Dec 05 10:51:20 crc kubenswrapper[5014]: timeout: failed to connect service ":50051" within 1s Dec 05 10:51:20 crc kubenswrapper[5014]: > Dec 05 10:51:20 crc kubenswrapper[5014]: I1205 10:51:20.571923 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hds86" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerName="registry-server" probeResult="failure" output=< Dec 05 10:51:20 crc kubenswrapper[5014]: timeout: failed to connect service ":50051" within 1s Dec 05 10:51:20 crc kubenswrapper[5014]: > Dec 05 10:51:21 crc kubenswrapper[5014]: I1205 10:51:21.510474 5014 generic.go:334] "Generic (PLEG): container finished" podID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerID="b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800" exitCode=0 Dec 05 10:51:21 crc kubenswrapper[5014]: I1205 10:51:21.510517 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pxrjf" event={"ID":"f35c6bb2-9a29-41b5-bfeb-39e8848b095f","Type":"ContainerDied","Data":"b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800"} Dec 05 10:51:25 crc kubenswrapper[5014]: I1205 10:51:25.720147 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:51:26 crc kubenswrapper[5014]: I1205 10:51:26.107161 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:51:26 crc kubenswrapper[5014]: I1205 10:51:26.107228 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:51:26 crc kubenswrapper[5014]: I1205 10:51:26.157888 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:51:26 crc kubenswrapper[5014]: I1205 10:51:26.624524 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:51:27 crc kubenswrapper[5014]: I1205 10:51:27.697616 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xr4bg"] Dec 05 10:51:28 crc kubenswrapper[5014]: I1205 10:51:28.375879 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:51:28 crc kubenswrapper[5014]: I1205 10:51:28.573026 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xr4bg" podUID="32523fc8-6af2-488b-984c-e38d294adadd" containerName="registry-server" containerID="cri-o://9b427cbe70b6388bbc0cd915363b0b34fcc5cb15943d93110182cc81908010b9" gracePeriod=2 Dec 05 10:51:29 crc kubenswrapper[5014]: I1205 10:51:29.112089 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:51:29 crc kubenswrapper[5014]: I1205 10:51:29.158265 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:51:29 crc kubenswrapper[5014]: I1205 10:51:29.562532 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:51:29 crc kubenswrapper[5014]: I1205 10:51:29.587044 5014 generic.go:334] "Generic (PLEG): container finished" podID="32523fc8-6af2-488b-984c-e38d294adadd" containerID="9b427cbe70b6388bbc0cd915363b0b34fcc5cb15943d93110182cc81908010b9" exitCode=0 Dec 05 10:51:29 crc kubenswrapper[5014]: I1205 10:51:29.587823 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xr4bg" event={"ID":"32523fc8-6af2-488b-984c-e38d294adadd","Type":"ContainerDied","Data":"9b427cbe70b6388bbc0cd915363b0b34fcc5cb15943d93110182cc81908010b9"} Dec 05 10:51:29 crc kubenswrapper[5014]: I1205 10:51:29.606481 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:51:29 crc kubenswrapper[5014]: I1205 10:51:29.901922 5014 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 10:51:29 crc kubenswrapper[5014]: I1205 10:51:29.901922 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xr4bg"
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.069039 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gslwm\" (UniqueName: \"kubernetes.io/projected/32523fc8-6af2-488b-984c-e38d294adadd-kube-api-access-gslwm\") pod \"32523fc8-6af2-488b-984c-e38d294adadd\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") "
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.069168 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-utilities\") pod \"32523fc8-6af2-488b-984c-e38d294adadd\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") "
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.069759 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-catalog-content\") pod \"32523fc8-6af2-488b-984c-e38d294adadd\" (UID: \"32523fc8-6af2-488b-984c-e38d294adadd\") "
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.070763 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-utilities" (OuterVolumeSpecName: "utilities") pod "32523fc8-6af2-488b-984c-e38d294adadd" (UID: "32523fc8-6af2-488b-984c-e38d294adadd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.074354 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32523fc8-6af2-488b-984c-e38d294adadd-kube-api-access-gslwm" (OuterVolumeSpecName: "kube-api-access-gslwm") pod "32523fc8-6af2-488b-984c-e38d294adadd" (UID: "32523fc8-6af2-488b-984c-e38d294adadd"). InnerVolumeSpecName "kube-api-access-gslwm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.171070 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gslwm\" (UniqueName: \"kubernetes.io/projected/32523fc8-6af2-488b-984c-e38d294adadd-kube-api-access-gslwm\") on node \"crc\" DevicePath \"\""
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.171115 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.595118 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xr4bg" event={"ID":"32523fc8-6af2-488b-984c-e38d294adadd","Type":"ContainerDied","Data":"86f229082747daf1df4097e7e0bdd51927798dfb5b85b8985cfeb2d344a9826a"}
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.595493 5014 scope.go:117] "RemoveContainer" containerID="9b427cbe70b6388bbc0cd915363b0b34fcc5cb15943d93110182cc81908010b9"
Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.595192 5014 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/community-operators-xr4bg" Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.699407 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgnc6"] Dec 05 10:51:30 crc kubenswrapper[5014]: I1205 10:51:30.699810 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dgnc6" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerName="registry-server" containerID="cri-o://afd2048c2f80c81ccdb7ebd36ee4d7f2f269f0b80b8c2a920cdea0fe5eacd059" gracePeriod=2 Dec 05 10:51:31 crc kubenswrapper[5014]: I1205 10:51:31.449170 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32523fc8-6af2-488b-984c-e38d294adadd" (UID: "32523fc8-6af2-488b-984c-e38d294adadd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:51:31 crc kubenswrapper[5014]: I1205 10:51:31.489976 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32523fc8-6af2-488b-984c-e38d294adadd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:31 crc kubenswrapper[5014]: I1205 10:51:31.525708 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xr4bg"] Dec 05 10:51:31 crc kubenswrapper[5014]: I1205 10:51:31.529049 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xr4bg"] Dec 05 10:51:31 crc kubenswrapper[5014]: I1205 10:51:31.693541 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hds86"] Dec 05 10:51:31 crc kubenswrapper[5014]: I1205 10:51:31.693809 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hds86" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerName="registry-server" containerID="cri-o://1ef02da32dbf874f09512964a7bf98a81a61571e8df63c941f0a76bf3abd2e42" gracePeriod=2 Dec 05 10:51:32 crc kubenswrapper[5014]: I1205 10:51:32.361155 5014 scope.go:117] "RemoveContainer" containerID="cc697b2e72c0aa0ec065a995a0dcffb2fd65604819fa939b17a42eeb4f2a338a" Dec 05 10:51:32 crc kubenswrapper[5014]: I1205 10:51:32.609135 5014 generic.go:334] "Generic (PLEG): container finished" podID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerID="afd2048c2f80c81ccdb7ebd36ee4d7f2f269f0b80b8c2a920cdea0fe5eacd059" exitCode=0 Dec 05 10:51:32 crc kubenswrapper[5014]: I1205 10:51:32.609177 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgnc6" event={"ID":"e7cf4621-ac21-4364-a447-ebea3c11082e","Type":"ContainerDied","Data":"afd2048c2f80c81ccdb7ebd36ee4d7f2f269f0b80b8c2a920cdea0fe5eacd059"} Dec 05 10:51:32 crc kubenswrapper[5014]: I1205 10:51:32.937167 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 10:51:32 crc kubenswrapper[5014]: I1205 10:51:32.937252 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" 
podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 10:51:32 crc kubenswrapper[5014]: I1205 10:51:32.937357 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:51:32 crc kubenswrapper[5014]: I1205 10:51:32.938017 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 10:51:32 crc kubenswrapper[5014]: I1205 10:51:32.938196 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9" gracePeriod=600 Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.325164 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32523fc8-6af2-488b-984c-e38d294adadd" path="/var/lib/kubelet/pods/32523fc8-6af2-488b-984c-e38d294adadd/volumes" Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.618464 5014 generic.go:334] "Generic (PLEG): container finished" podID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerID="1ef02da32dbf874f09512964a7bf98a81a61571e8df63c941f0a76bf3abd2e42" exitCode=0 Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.618575 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hds86" event={"ID":"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f","Type":"ContainerDied","Data":"1ef02da32dbf874f09512964a7bf98a81a61571e8df63c941f0a76bf3abd2e42"} Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.635079 5014 scope.go:117] "RemoveContainer" containerID="7a203623ed16b96d3a5f93c922386137e27c576d97ca51b0cc045d8a21e09d9f" Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.700800 5014 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.700800 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dgnc6"
Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.818866 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-utilities\") pod \"e7cf4621-ac21-4364-a447-ebea3c11082e\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") "
Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.818945 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xv99j\" (UniqueName: \"kubernetes.io/projected/e7cf4621-ac21-4364-a447-ebea3c11082e-kube-api-access-xv99j\") pod \"e7cf4621-ac21-4364-a447-ebea3c11082e\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") "
Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.819022 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-catalog-content\") pod \"e7cf4621-ac21-4364-a447-ebea3c11082e\" (UID: \"e7cf4621-ac21-4364-a447-ebea3c11082e\") "
Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.819857 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-utilities" (OuterVolumeSpecName: "utilities") pod "e7cf4621-ac21-4364-a447-ebea3c11082e" (UID: "e7cf4621-ac21-4364-a447-ebea3c11082e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.823932 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7cf4621-ac21-4364-a447-ebea3c11082e-kube-api-access-xv99j" (OuterVolumeSpecName: "kube-api-access-xv99j") pod "e7cf4621-ac21-4364-a447-ebea3c11082e" (UID: "e7cf4621-ac21-4364-a447-ebea3c11082e"). InnerVolumeSpecName "kube-api-access-xv99j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.837581 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7cf4621-ac21-4364-a447-ebea3c11082e" (UID: "e7cf4621-ac21-4364-a447-ebea3c11082e"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.920453 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xv99j\" (UniqueName: \"kubernetes.io/projected/e7cf4621-ac21-4364-a447-ebea3c11082e-kube-api-access-xv99j\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.920509 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:33 crc kubenswrapper[5014]: I1205 10:51:33.920525 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7cf4621-ac21-4364-a447-ebea3c11082e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:34 crc kubenswrapper[5014]: I1205 10:51:34.627257 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgnc6" event={"ID":"e7cf4621-ac21-4364-a447-ebea3c11082e","Type":"ContainerDied","Data":"56a71c3e6ecf424c730b3036c70109efcaf0622c586afc19b03a290b6489b75c"} Dec 05 10:51:34 crc kubenswrapper[5014]: I1205 10:51:34.627343 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dgnc6" Dec 05 10:51:34 crc kubenswrapper[5014]: I1205 10:51:34.630741 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9" exitCode=0 Dec 05 10:51:34 crc kubenswrapper[5014]: I1205 10:51:34.630779 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9"} Dec 05 10:51:34 crc kubenswrapper[5014]: I1205 10:51:34.658068 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgnc6"] Dec 05 10:51:34 crc kubenswrapper[5014]: I1205 10:51:34.663062 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgnc6"] Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.324708 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" path="/var/lib/kubelet/pods/e7cf4621-ac21-4364-a447-ebea3c11082e/volumes" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.344415 5014 scope.go:117] "RemoveContainer" containerID="afd2048c2f80c81ccdb7ebd36ee4d7f2f269f0b80b8c2a920cdea0fe5eacd059" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.387241 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.392554 5014 scope.go:117] "RemoveContainer" containerID="fe0da8cafc0e033377dd2b9540d3193fdf6c348b3a5d373940371a92af82bc16" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.454852 5014 scope.go:117] "RemoveContainer" containerID="a93759e111da088bfd47dbe673e2961eee6948c994ff9c722b91c69634cf411c" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.541994 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-utilities\") pod \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.542297 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkpkw\" (UniqueName: \"kubernetes.io/projected/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-kube-api-access-pkpkw\") pod \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.542513 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-catalog-content\") pod \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\" (UID: \"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f\") " Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.543175 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-utilities" (OuterVolumeSpecName: "utilities") pod "8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" (UID: "8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.548972 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-kube-api-access-pkpkw" (OuterVolumeSpecName: "kube-api-access-pkpkw") pod "8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" (UID: "8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f"). InnerVolumeSpecName "kube-api-access-pkpkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.646044 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.647106 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkpkw\" (UniqueName: \"kubernetes.io/projected/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-kube-api-access-pkpkw\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.650181 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hds86" event={"ID":"8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f","Type":"ContainerDied","Data":"1f82580f7242151ece2a883d4c80b68bde4440b3a8135c9e21bf671c1409f177"} Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.650247 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hds86" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.650286 5014 scope.go:117] "RemoveContainer" containerID="1ef02da32dbf874f09512964a7bf98a81a61571e8df63c941f0a76bf3abd2e42" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.663299 5014 scope.go:117] "RemoveContainer" containerID="c6987661e91d6480aa47ba98e96b10c79e9f92b2f0a9394a6c355dd3f70a12b6" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.677964 5014 scope.go:117] "RemoveContainer" containerID="89e271352c976f2ef131789d2ac846a59d18387b4cae87688e1bcc56cc3396f0" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.871715 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" (UID: "8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.952057 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.978535 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hds86"] Dec 05 10:51:35 crc kubenswrapper[5014]: I1205 10:51:35.981377 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hds86"] Dec 05 10:51:36 crc kubenswrapper[5014]: I1205 10:51:36.658853 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pxrjf" event={"ID":"f35c6bb2-9a29-41b5-bfeb-39e8848b095f","Type":"ContainerStarted","Data":"baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f"} Dec 05 10:51:36 crc kubenswrapper[5014]: I1205 10:51:36.662534 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"53ecb4867812cd1457c82fb4bd0d6027e193527f1ade465321174a02c10359f9"} Dec 05 10:51:36 crc kubenswrapper[5014]: I1205 10:51:36.664826 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8v5hq" event={"ID":"132d8475-31f2-4d2c-90d2-7d7739cc0fea","Type":"ContainerStarted","Data":"64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872"} Dec 05 10:51:36 crc kubenswrapper[5014]: I1205 10:51:36.667385 5014 generic.go:334] "Generic (PLEG): container finished" podID="70885ea0-025c-45b1-9999-7a44c28312ba" containerID="5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6" exitCode=0 Dec 05 10:51:36 crc kubenswrapper[5014]: I1205 10:51:36.667425 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2pv7s" event={"ID":"70885ea0-025c-45b1-9999-7a44c28312ba","Type":"ContainerDied","Data":"5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6"} Dec 05 10:51:36 crc kubenswrapper[5014]: I1205 10:51:36.678208 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pxrjf" podStartSLOduration=2.83030908 podStartE2EDuration="1m19.678187938s" podCreationTimestamp="2025-12-05 10:50:17 +0000 UTC" 
firstStartedPulling="2025-12-05 10:50:18.498336596 +0000 UTC m=+145.446454300" lastFinishedPulling="2025-12-05 10:51:35.346215444 +0000 UTC m=+222.294333158" observedRunningTime="2025-12-05 10:51:36.677990853 +0000 UTC m=+223.626108557" watchObservedRunningTime="2025-12-05 10:51:36.678187938 +0000 UTC m=+223.626305642" Dec 05 10:51:36 crc kubenswrapper[5014]: I1205 10:51:36.720409 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8v5hq" podStartSLOduration=7.867482059 podStartE2EDuration="1m21.720390293s" podCreationTimestamp="2025-12-05 10:50:15 +0000 UTC" firstStartedPulling="2025-12-05 10:50:18.510403025 +0000 UTC m=+145.458520729" lastFinishedPulling="2025-12-05 10:51:32.363311239 +0000 UTC m=+219.311428963" observedRunningTime="2025-12-05 10:51:36.718193419 +0000 UTC m=+223.666311143" watchObservedRunningTime="2025-12-05 10:51:36.720390293 +0000 UTC m=+223.668507997" Dec 05 10:51:37 crc kubenswrapper[5014]: I1205 10:51:37.328333 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" path="/var/lib/kubelet/pods/8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f/volumes" Dec 05 10:51:37 crc kubenswrapper[5014]: I1205 10:51:37.766977 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:51:37 crc kubenswrapper[5014]: I1205 10:51:37.767040 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:51:37 crc kubenswrapper[5014]: I1205 10:51:37.812194 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:51:38 crc kubenswrapper[5014]: I1205 10:51:38.680502 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2pv7s" event={"ID":"70885ea0-025c-45b1-9999-7a44c28312ba","Type":"ContainerStarted","Data":"f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6"} Dec 05 10:51:38 crc kubenswrapper[5014]: I1205 10:51:38.699650 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2pv7s" podStartSLOduration=4.719737457 podStartE2EDuration="1m23.699632076s" podCreationTimestamp="2025-12-05 10:50:15 +0000 UTC" firstStartedPulling="2025-12-05 10:50:18.537803376 +0000 UTC m=+145.485921090" lastFinishedPulling="2025-12-05 10:51:37.517698005 +0000 UTC m=+224.465815709" observedRunningTime="2025-12-05 10:51:38.69740749 +0000 UTC m=+225.645525214" watchObservedRunningTime="2025-12-05 10:51:38.699632076 +0000 UTC m=+225.647749790" Dec 05 10:51:39 crc kubenswrapper[5014]: I1205 10:51:39.023307 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7x7jf"] Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.298107 5014 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.298757 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab" gracePeriod=15 Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.298841 5014 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f" gracePeriod=15 Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.298812 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a" gracePeriod=15 Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.298874 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c" gracePeriod=15 Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.298852 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b" gracePeriod=15 Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.299596 5014 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.299853 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32523fc8-6af2-488b-984c-e38d294adadd" containerName="extract-utilities" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.299870 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="32523fc8-6af2-488b-984c-e38d294adadd" containerName="extract-utilities" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.299886 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.299893 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.299902 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.299910 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.299920 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerName="extract-utilities" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.299929 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerName="extract-utilities" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.299940 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerName="registry-server" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.299948 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" 
containerName="registry-server" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.299959 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb680a2-ca73-4260-8f5f-897296700e43" containerName="pruner" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300005 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb680a2-ca73-4260-8f5f-897296700e43" containerName="pruner" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300017 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32523fc8-6af2-488b-984c-e38d294adadd" containerName="extract-content" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300024 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="32523fc8-6af2-488b-984c-e38d294adadd" containerName="extract-content" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300033 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300040 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300049 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300056 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300066 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerName="extract-content" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300074 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerName="extract-content" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300085 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32523fc8-6af2-488b-984c-e38d294adadd" containerName="registry-server" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300093 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="32523fc8-6af2-488b-984c-e38d294adadd" containerName="registry-server" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300100 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerName="extract-content" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300108 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerName="extract-content" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300118 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerName="registry-server" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300127 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerName="registry-server" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300136 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300142 5014 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300152 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerName="extract-utilities" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300158 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerName="extract-utilities" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300169 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300176 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 10:51:41 crc kubenswrapper[5014]: E1205 10:51:41.300185 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300192 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300382 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300394 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7cf4621-ac21-4364-a447-ebea3c11082e" containerName="registry-server" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300402 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eb680a2-ca73-4260-8f5f-897296700e43" containerName="pruner" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300410 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d1ac7c3-54fd-4e58-ba1c-f2821b6d0d9f" containerName="registry-server" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300419 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300428 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300439 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300453 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="32523fc8-6af2-488b-984c-e38d294adadd" containerName="registry-server" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300462 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.300471 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.302093 5014 kubelet.go:2421] "SyncLoop ADD" source="file" 
pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.302673 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.307205 5014 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.431286 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.431332 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.431868 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.431959 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.432040 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.432073 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.432157 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.432235 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.533848 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.533989 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534314 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534255 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534371 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534402 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534432 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534450 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534470 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534477 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534502 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534521 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534501 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534450 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534551 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:41 crc kubenswrapper[5014]: I1205 10:51:41.534428 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.711718 5014 generic.go:334] "Generic (PLEG): container finished" podID="8576908a-678d-4f43-a530-4686764fd2cf" containerID="57b27251546cbcd68f7126efcb23a9461e4bc19c9c867b447f7dc9bf12ffd343" exitCode=0 Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.711850 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8576908a-678d-4f43-a530-4686764fd2cf","Type":"ContainerDied","Data":"57b27251546cbcd68f7126efcb23a9461e4bc19c9c867b447f7dc9bf12ffd343"} Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.713163 5014 
status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.714429 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.716022 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.717249 5014 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a" exitCode=0 Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.717306 5014 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c" exitCode=0 Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.717323 5014 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b" exitCode=0 Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.717336 5014 scope.go:117] "RemoveContainer" containerID="c0a6d919197efa8ab0ca88016d154564cc0842c37a2aab94e722ea4be0a2b6fc" Dec 05 10:51:42 crc kubenswrapper[5014]: I1205 10:51:42.717347 5014 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f" exitCode=2 Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.320318 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.654483 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.655743 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.656574 5014 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.656866 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.665479 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.665578 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.665602 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.665617 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.665635 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.665737 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.665901 5014 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.665918 5014 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.665928 5014 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.724307 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.725059 5014 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab" exitCode=0 Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.725144 5014 scope.go:117] "RemoveContainer" containerID="709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.725212 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.741720 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.741989 5014 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.751483 5014 scope.go:117] "RemoveContainer" containerID="2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.772469 5014 scope.go:117] "RemoveContainer" containerID="a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.791535 5014 scope.go:117] "RemoveContainer" containerID="378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.819395 5014 scope.go:117] "RemoveContainer" containerID="39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.838156 5014 scope.go:117] "RemoveContainer" containerID="1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.859462 5014 scope.go:117] "RemoveContainer" containerID="709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a" Dec 05 10:51:43 crc 
kubenswrapper[5014]: E1205 10:51:43.860026 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\": container with ID starting with 709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a not found: ID does not exist" containerID="709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.860067 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a"} err="failed to get container status \"709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\": rpc error: code = NotFound desc = could not find container \"709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a\": container with ID starting with 709b5f3972e09e063a6c3ddfa36356134ad041d310db185bacc6f9f1b163f69a not found: ID does not exist" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.860094 5014 scope.go:117] "RemoveContainer" containerID="2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c" Dec 05 10:51:43 crc kubenswrapper[5014]: E1205 10:51:43.860437 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\": container with ID starting with 2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c not found: ID does not exist" containerID="2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.860463 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c"} err="failed to get container status \"2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\": rpc error: code = NotFound desc = could not find container \"2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c\": container with ID starting with 2742256a1ff5a87c15776ee5638cf00bf54ff1c739e556c12bacc2cb3ba5a81c not found: ID does not exist" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.860477 5014 scope.go:117] "RemoveContainer" containerID="a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b" Dec 05 10:51:43 crc kubenswrapper[5014]: E1205 10:51:43.860736 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\": container with ID starting with a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b not found: ID does not exist" containerID="a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.860759 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b"} err="failed to get container status \"a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\": rpc error: code = NotFound desc = could not find container \"a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b\": container with ID starting with a0a575190af9346e7ff7b475827a7c1ac82ef8b818cba1f6c537f2453498703b not found: ID does not exist" Dec 05 10:51:43 crc kubenswrapper[5014]: 
I1205 10:51:43.860804 5014 scope.go:117] "RemoveContainer" containerID="378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f" Dec 05 10:51:43 crc kubenswrapper[5014]: E1205 10:51:43.861119 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\": container with ID starting with 378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f not found: ID does not exist" containerID="378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.861180 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f"} err="failed to get container status \"378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\": rpc error: code = NotFound desc = could not find container \"378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f\": container with ID starting with 378215bab8bbedb0b2781e93ee0c52e6c33d6126e6ba7f6441f5f5c3808f180f not found: ID does not exist" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.861215 5014 scope.go:117] "RemoveContainer" containerID="39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab" Dec 05 10:51:43 crc kubenswrapper[5014]: E1205 10:51:43.861919 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\": container with ID starting with 39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab not found: ID does not exist" containerID="39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.861952 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab"} err="failed to get container status \"39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\": rpc error: code = NotFound desc = could not find container \"39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab\": container with ID starting with 39ca9de99630171e88a092fedaf61d9b681b3cafe7e630a4c2e77ec418ccacab not found: ID does not exist" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.861970 5014 scope.go:117] "RemoveContainer" containerID="1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139" Dec 05 10:51:43 crc kubenswrapper[5014]: E1205 10:51:43.862169 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\": container with ID starting with 1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139 not found: ID does not exist" containerID="1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.862194 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139"} err="failed to get container status \"1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\": rpc error: code = NotFound desc = could not find container \"1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139\": container 
with ID starting with 1c248239410752a41080087e3a195877aa15473471c7b61103740fcc5ec33139 not found: ID does not exist" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.925517 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.926132 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.926448 5014 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.970860 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8576908a-678d-4f43-a530-4686764fd2cf-kube-api-access\") pod \"8576908a-678d-4f43-a530-4686764fd2cf\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.970930 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-var-lock\") pod \"8576908a-678d-4f43-a530-4686764fd2cf\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.971015 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-kubelet-dir\") pod \"8576908a-678d-4f43-a530-4686764fd2cf\" (UID: \"8576908a-678d-4f43-a530-4686764fd2cf\") " Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.971388 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8576908a-678d-4f43-a530-4686764fd2cf" (UID: "8576908a-678d-4f43-a530-4686764fd2cf"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.971440 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-var-lock" (OuterVolumeSpecName: "var-lock") pod "8576908a-678d-4f43-a530-4686764fd2cf" (UID: "8576908a-678d-4f43-a530-4686764fd2cf"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:51:43 crc kubenswrapper[5014]: I1205 10:51:43.975727 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8576908a-678d-4f43-a530-4686764fd2cf-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8576908a-678d-4f43-a530-4686764fd2cf" (UID: "8576908a-678d-4f43-a530-4686764fd2cf"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:51:44 crc kubenswrapper[5014]: I1205 10:51:44.072828 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8576908a-678d-4f43-a530-4686764fd2cf-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:44 crc kubenswrapper[5014]: I1205 10:51:44.072870 5014 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:44 crc kubenswrapper[5014]: I1205 10:51:44.072884 5014 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8576908a-678d-4f43-a530-4686764fd2cf-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:51:44 crc kubenswrapper[5014]: I1205 10:51:44.737622 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 10:51:44 crc kubenswrapper[5014]: I1205 10:51:44.738081 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8576908a-678d-4f43-a530-4686764fd2cf","Type":"ContainerDied","Data":"f2009442bdb747ee829961e6878372727b13b59e0db964027ae60df6f15a1070"} Dec 05 10:51:44 crc kubenswrapper[5014]: I1205 10:51:44.738143 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2009442bdb747ee829961e6878372727b13b59e0db964027ae60df6f15a1070" Dec 05 10:51:44 crc kubenswrapper[5014]: I1205 10:51:44.761919 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:44 crc kubenswrapper[5014]: I1205 10:51:44.762393 5014 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:45 crc kubenswrapper[5014]: I1205 10:51:45.328852 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.126943 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.127249 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.173532 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.173868 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc 
kubenswrapper[5014]: I1205 10:51:46.174202 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.268347 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.268425 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.324015 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.324761 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: E1205 10:51:46.325264 5014 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.129.56.110:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.325252 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.325737 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.325901 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:46 crc kubenswrapper[5014]: E1205 10:51:46.360561 5014 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.110:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e4c37c351fae2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 10:51:46.359655138 +0000 UTC m=+233.307772842,LastTimestamp:2025-12-05 10:51:46.359655138 +0000 UTC m=+233.307772842,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.746564 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47"} Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.746635 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"97fe3ef1d02131cc4f4c0c45e2ec7e08d18fb098b5160d07e8c29469f8a467a3"} Dec 05 10:51:46 crc kubenswrapper[5014]: E1205 10:51:46.747236 5014 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.129.56.110:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.747412 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.747742 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.747954 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.783590 5014 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.783906 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.784235 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.784618 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.788541 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.789399 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.789658 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:46 crc kubenswrapper[5014]: I1205 10:51:46.790031 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:47 crc kubenswrapper[5014]: E1205 10:51:47.393703 5014 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:47 crc kubenswrapper[5014]: E1205 10:51:47.394555 5014 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:47 crc kubenswrapper[5014]: E1205 10:51:47.395157 5014 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection 
refused" Dec 05 10:51:47 crc kubenswrapper[5014]: E1205 10:51:47.395752 5014 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:47 crc kubenswrapper[5014]: E1205 10:51:47.396555 5014 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:47 crc kubenswrapper[5014]: I1205 10:51:47.396601 5014 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 10:51:47 crc kubenswrapper[5014]: E1205 10:51:47.397017 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="200ms" Dec 05 10:51:47 crc kubenswrapper[5014]: E1205 10:51:47.597786 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="400ms" Dec 05 10:51:47 crc kubenswrapper[5014]: I1205 10:51:47.823718 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:51:47 crc kubenswrapper[5014]: I1205 10:51:47.824263 5014 status_manager.go:851] "Failed to get status for pod" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" pod="openshift-marketplace/redhat-marketplace-pxrjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pxrjf\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:47 crc kubenswrapper[5014]: I1205 10:51:47.824697 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:47 crc kubenswrapper[5014]: I1205 10:51:47.825306 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:47 crc kubenswrapper[5014]: I1205 10:51:47.825765 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:47 crc kubenswrapper[5014]: E1205 10:51:47.999065 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="800ms" 
Dec 05 10:51:48 crc kubenswrapper[5014]: E1205 10:51:48.800004 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="1.6s" Dec 05 10:51:50 crc kubenswrapper[5014]: E1205 10:51:50.400769 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="3.2s" Dec 05 10:51:53 crc kubenswrapper[5014]: I1205 10:51:53.322209 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:53 crc kubenswrapper[5014]: I1205 10:51:53.322581 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:53 crc kubenswrapper[5014]: I1205 10:51:53.322739 5014 status_manager.go:851] "Failed to get status for pod" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" pod="openshift-marketplace/redhat-marketplace-pxrjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pxrjf\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:53 crc kubenswrapper[5014]: I1205 10:51:53.322992 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:53 crc kubenswrapper[5014]: E1205 10:51:53.602621 5014 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.110:6443: connect: connection refused" interval="6.4s" Dec 05 10:51:54 crc kubenswrapper[5014]: I1205 10:51:54.798321 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 10:51:54 crc kubenswrapper[5014]: I1205 10:51:54.798864 5014 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346" exitCode=1 Dec 05 10:51:54 crc kubenswrapper[5014]: I1205 10:51:54.798911 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346"} Dec 05 10:51:54 crc kubenswrapper[5014]: I1205 10:51:54.799549 5014 scope.go:117] "RemoveContainer" 
containerID="54d22a08c274a8385eb49b5537bf9611a9a898547684b165637bf5c88c909346" Dec 05 10:51:54 crc kubenswrapper[5014]: I1205 10:51:54.800854 5014 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:54 crc kubenswrapper[5014]: I1205 10:51:54.801633 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:54 crc kubenswrapper[5014]: I1205 10:51:54.802317 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:54 crc kubenswrapper[5014]: I1205 10:51:54.802897 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:54 crc kubenswrapper[5014]: I1205 10:51:54.803489 5014 status_manager.go:851] "Failed to get status for pod" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" pod="openshift-marketplace/redhat-marketplace-pxrjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pxrjf\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: E1205 10:51:55.050617 5014 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.110:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e4c37c351fae2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 10:51:46.359655138 +0000 UTC m=+233.307772842,LastTimestamp:2025-12-05 10:51:46.359655138 +0000 UTC m=+233.307772842,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.317617 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.318783 5014 status_manager.go:851] "Failed to get status for pod" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" pod="openshift-marketplace/redhat-marketplace-pxrjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pxrjf\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.319225 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.319979 5014 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.320236 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.320514 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.334114 5014 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.334148 5014 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:51:55 crc kubenswrapper[5014]: E1205 10:51:55.334692 5014 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.335061 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:55 crc kubenswrapper[5014]: W1205 10:51:55.355583 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-346ca1f6444b75f56f01e96051ee9bd7278709e7359b566ed3567fc0ab6f0642 WatchSource:0}: Error finding container 346ca1f6444b75f56f01e96051ee9bd7278709e7359b566ed3567fc0ab6f0642: Status 404 returned error can't find the container with id 346ca1f6444b75f56f01e96051ee9bd7278709e7359b566ed3567fc0ab6f0642 Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.665593 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.807932 5014 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="041834476edcf6f8f0dd7d6f8f40f834fb86ce861ca22c03268a5897c09b9b63" exitCode=0 Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.808023 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"041834476edcf6f8f0dd7d6f8f40f834fb86ce861ca22c03268a5897c09b9b63"} Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.808058 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"346ca1f6444b75f56f01e96051ee9bd7278709e7359b566ed3567fc0ab6f0642"} Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.808369 5014 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.808385 5014 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:51:55 crc kubenswrapper[5014]: E1205 10:51:55.809446 5014 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.809906 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.810374 5014 status_manager.go:851] "Failed to get status for pod" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" pod="openshift-marketplace/redhat-marketplace-pxrjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pxrjf\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.810675 5014 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.810867 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.811066 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.811959 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.812008 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e9feb2b05ed71a39c0af79144e891221dba03ad550a460180bd2da09ccaefe0b"} Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.813155 5014 status_manager.go:851] "Failed to get status for pod" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" pod="openshift-marketplace/certified-operators-8v5hq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8v5hq\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.813480 5014 status_manager.go:851] "Failed to get status for pod" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" pod="openshift-marketplace/redhat-marketplace-pxrjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pxrjf\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.814411 5014 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.814781 5014 status_manager.go:851] "Failed to get status for pod" podUID="8576908a-678d-4f43-a530-4686764fd2cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 10:51:55 crc kubenswrapper[5014]: I1205 10:51:55.814963 5014 status_manager.go:851] "Failed to get status for pod" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" pod="openshift-marketplace/certified-operators-2pv7s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2pv7s\": dial tcp 38.129.56.110:6443: connect: connection refused" Dec 05 
10:51:56 crc kubenswrapper[5014]: I1205 10:51:56.822074 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ba2d6e4a1624362a417aab66a4e3f83496b460b5f104a46242ccccc57123b005"} Dec 05 10:51:56 crc kubenswrapper[5014]: I1205 10:51:56.822461 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fc69b097c3fb78126a0753f0600c17cba5f919509d6d5ec71b97bf6e008d4401"} Dec 05 10:51:56 crc kubenswrapper[5014]: I1205 10:51:56.822474 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"06e54dc342b4fff70721d2d68255c053cf787a7ba97cf2dcc2e16320e9fccb3e"} Dec 05 10:51:57 crc kubenswrapper[5014]: I1205 10:51:57.829809 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2ff0033d79922a921f5c40db4dc1ab1b4db94213938c4471d2726972d13fc410"} Dec 05 10:51:57 crc kubenswrapper[5014]: I1205 10:51:57.830209 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8b879176abb86ac080708906e8256a4318e3085f706ea244edd09995393f4104"} Dec 05 10:51:57 crc kubenswrapper[5014]: I1205 10:51:57.830589 5014 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:51:57 crc kubenswrapper[5014]: I1205 10:51:57.830609 5014 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:51:57 crc kubenswrapper[5014]: I1205 10:51:57.830847 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:51:59 crc kubenswrapper[5014]: I1205 10:51:59.741460 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:51:59 crc kubenswrapper[5014]: I1205 10:51:59.746700 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:51:59 crc kubenswrapper[5014]: I1205 10:51:59.843673 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:52:00 crc kubenswrapper[5014]: I1205 10:52:00.336097 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:52:00 crc kubenswrapper[5014]: I1205 10:52:00.336171 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:52:00 crc kubenswrapper[5014]: I1205 10:52:00.342043 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:52:02 crc kubenswrapper[5014]: I1205 10:52:02.843490 5014 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:52:02 crc 
kubenswrapper[5014]: I1205 10:52:02.868337 5014 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:52:02 crc kubenswrapper[5014]: I1205 10:52:02.868367 5014 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:52:02 crc kubenswrapper[5014]: I1205 10:52:02.872295 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:52:03 crc kubenswrapper[5014]: I1205 10:52:03.337901 5014 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fa42b0c8-6eaa-4f02-96ea-7e36bb46beb3" Dec 05 10:52:03 crc kubenswrapper[5014]: I1205 10:52:03.877159 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_71bb4a3aecc4ba5b26c4b7318770ce13/kube-apiserver-check-endpoints/0.log" Dec 05 10:52:03 crc kubenswrapper[5014]: I1205 10:52:03.879923 5014 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="2ff0033d79922a921f5c40db4dc1ab1b4db94213938c4471d2726972d13fc410" exitCode=255 Dec 05 10:52:03 crc kubenswrapper[5014]: I1205 10:52:03.879966 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"2ff0033d79922a921f5c40db4dc1ab1b4db94213938c4471d2726972d13fc410"} Dec 05 10:52:03 crc kubenswrapper[5014]: I1205 10:52:03.880244 5014 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:52:03 crc kubenswrapper[5014]: I1205 10:52:03.880267 5014 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:52:03 crc kubenswrapper[5014]: I1205 10:52:03.883061 5014 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fa42b0c8-6eaa-4f02-96ea-7e36bb46beb3" Dec 05 10:52:03 crc kubenswrapper[5014]: I1205 10:52:03.883643 5014 scope.go:117] "RemoveContainer" containerID="2ff0033d79922a921f5c40db4dc1ab1b4db94213938c4471d2726972d13fc410" Dec 05 10:52:04 crc kubenswrapper[5014]: I1205 10:52:04.053585 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" podUID="9e347737-8d07-4246-a6fd-60e7aa5bc6ab" containerName="oauth-openshift" containerID="cri-o://987ed194eaf0445fa6519fd37629de1b3378991b139df9c02aafb01578c362bf" gracePeriod=15 Dec 05 10:52:04 crc kubenswrapper[5014]: I1205 10:52:04.888686 5014 generic.go:334] "Generic (PLEG): container finished" podID="9e347737-8d07-4246-a6fd-60e7aa5bc6ab" containerID="987ed194eaf0445fa6519fd37629de1b3378991b139df9c02aafb01578c362bf" exitCode=0 Dec 05 10:52:04 crc kubenswrapper[5014]: I1205 10:52:04.888790 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" event={"ID":"9e347737-8d07-4246-a6fd-60e7aa5bc6ab","Type":"ContainerDied","Data":"987ed194eaf0445fa6519fd37629de1b3378991b139df9c02aafb01578c362bf"} Dec 05 10:52:04 
crc kubenswrapper[5014]: I1205 10:52:04.891027 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_71bb4a3aecc4ba5b26c4b7318770ce13/kube-apiserver-check-endpoints/0.log" Dec 05 10:52:04 crc kubenswrapper[5014]: I1205 10:52:04.893789 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e98fda5899240b11104c95368e96f00e69c95f0b3ca0c6ab74e66347c563133e"} Dec 05 10:52:04 crc kubenswrapper[5014]: I1205 10:52:04.894022 5014 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:52:04 crc kubenswrapper[5014]: I1205 10:52:04.894040 5014 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:52:04 crc kubenswrapper[5014]: I1205 10:52:04.894205 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:52:04 crc kubenswrapper[5014]: I1205 10:52:04.897062 5014 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fa42b0c8-6eaa-4f02-96ea-7e36bb46beb3" Dec 05 10:52:04 crc kubenswrapper[5014]: I1205 10:52:04.970451 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.041600 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-router-certs\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.042949 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-policies\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.043101 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m467d\" (UniqueName: \"kubernetes.io/projected/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-kube-api-access-m467d\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.043245 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-service-ca\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.043478 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-ocp-branding-template\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc 
kubenswrapper[5014]: I1205 10:52:05.043599 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-trusted-ca-bundle\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.043696 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-dir\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.043815 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-session\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.043948 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-error\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044027 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044069 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044033 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044047 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-idp-0-file-data\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044143 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-cliconfig\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044178 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-provider-selection\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044205 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-login\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044321 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-serving-cert\") pod \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\" (UID: \"9e347737-8d07-4246-a6fd-60e7aa5bc6ab\") " Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044942 5014 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044970 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.044983 5014 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.045076 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.046588 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.049697 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.049899 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-kube-api-access-m467d" (OuterVolumeSpecName: "kube-api-access-m467d") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "kube-api-access-m467d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.050043 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.051039 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.051665 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.061518 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.061791 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.062253 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.068778 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "9e347737-8d07-4246-a6fd-60e7aa5bc6ab" (UID: "9e347737-8d07-4246-a6fd-60e7aa5bc6ab"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146326 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146364 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m467d\" (UniqueName: \"kubernetes.io/projected/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-kube-api-access-m467d\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146378 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146387 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146398 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146407 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146418 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146427 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146443 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146455 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.146465 5014 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e347737-8d07-4246-a6fd-60e7aa5bc6ab-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.672806 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.900437 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.900440 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7x7jf" event={"ID":"9e347737-8d07-4246-a6fd-60e7aa5bc6ab","Type":"ContainerDied","Data":"6f076624245b4024a5f9871a662d6b8e45a4628227c24f71c89705d774081374"} Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.900832 5014 scope.go:117] "RemoveContainer" containerID="987ed194eaf0445fa6519fd37629de1b3378991b139df9c02aafb01578c362bf" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.900551 5014 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.900908 5014 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="140b5f3a-5162-4b57-9e19-5701294f91c9" Dec 05 10:52:05 crc kubenswrapper[5014]: I1205 10:52:05.904097 5014 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fa42b0c8-6eaa-4f02-96ea-7e36bb46beb3" Dec 05 10:52:12 crc kubenswrapper[5014]: I1205 10:52:12.400514 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 10:52:12 crc kubenswrapper[5014]: I1205 10:52:12.404101 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 10:52:12 crc kubenswrapper[5014]: I1205 10:52:12.596294 5014 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 10:52:12 crc kubenswrapper[5014]: I1205 10:52:12.609300 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 10:52:12 crc kubenswrapper[5014]: I1205 10:52:12.745915 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 10:52:12 crc kubenswrapper[5014]: I1205 10:52:12.879839 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 10:52:13 crc kubenswrapper[5014]: I1205 10:52:13.068202 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 10:52:13 crc kubenswrapper[5014]: I1205 10:52:13.248513 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 10:52:13 crc kubenswrapper[5014]: I1205 10:52:13.455431 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 10:52:13 crc kubenswrapper[5014]: I1205 10:52:13.735717 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 10:52:13 crc kubenswrapper[5014]: I1205 10:52:13.805821 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 10:52:13 crc kubenswrapper[5014]: I1205 10:52:13.918589 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 10:52:13 crc kubenswrapper[5014]: I1205 10:52:13.973448 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.172842 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.206051 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.218131 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.266830 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.317008 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.337354 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.394581 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.464236 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.496431 5014 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.531558 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.622369 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.632256 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.687343 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 10:52:14 crc kubenswrapper[5014]: I1205 10:52:14.856492 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.005148 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.025586 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.177073 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.203423 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.344681 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.422871 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.587038 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.678762 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.780786 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.865110 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.922060 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.980081 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.992460 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 10:52:15 crc kubenswrapper[5014]: I1205 10:52:15.992906 5014 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.128513 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.189083 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.304748 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.389067 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.417169 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.679356 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.692670 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.791805 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.803663 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.823020 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.895376 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.902790 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.921049 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 10:52:16 crc kubenswrapper[5014]: I1205 10:52:16.993968 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.000992 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.093448 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.359143 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.420901 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.425052 5014 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.563878 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.682853 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.862857 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.916142 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 10:52:17 crc kubenswrapper[5014]: I1205 10:52:17.939868 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.095414 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.164112 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.173024 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.186391 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.214604 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.245212 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.300398 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.300723 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.356319 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.480601 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.520165 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.575977 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.621791 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.632371 5014 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.684545 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.712425 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.793923 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 10:52:18 crc kubenswrapper[5014]: I1205 10:52:18.811105 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.069379 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.117684 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.171325 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.185104 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.218245 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.246492 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.285339 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.339522 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.407681 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.530101 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.632393 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.633038 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.830586 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.919040 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.951860 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 
10:52:19 crc kubenswrapper[5014]: I1205 10:52:19.976020 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.007756 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.043765 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.130405 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.171117 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.172420 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.282624 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.364425 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.364635 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.455211 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.521534 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.744446 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.748194 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.748731 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.847501 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 10:52:20 crc kubenswrapper[5014]: I1205 10:52:20.988556 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.057204 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.058506 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.082386 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 
10:52:21.089707 5014 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.094957 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-7x7jf"] Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.095032 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.098722 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.099194 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.126444 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.141513 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.141489411 podStartE2EDuration="19.141489411s" podCreationTimestamp="2025-12-05 10:52:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:52:21.121665889 +0000 UTC m=+268.069783593" watchObservedRunningTime="2025-12-05 10:52:21.141489411 +0000 UTC m=+268.089607135" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.222833 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.227012 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.283681 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.330076 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e347737-8d07-4246-a6fd-60e7aa5bc6ab" path="/var/lib/kubelet/pods/9e347737-8d07-4246-a6fd-60e7aa5bc6ab/volumes" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.347197 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-8f56ccf5-xdnql"] Dec 05 10:52:21 crc kubenswrapper[5014]: E1205 10:52:21.347495 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8576908a-678d-4f43-a530-4686764fd2cf" containerName="installer" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.347512 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="8576908a-678d-4f43-a530-4686764fd2cf" containerName="installer" Dec 05 10:52:21 crc kubenswrapper[5014]: E1205 10:52:21.347526 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e347737-8d07-4246-a6fd-60e7aa5bc6ab" containerName="oauth-openshift" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.347533 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e347737-8d07-4246-a6fd-60e7aa5bc6ab" containerName="oauth-openshift" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.347646 5014 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="9e347737-8d07-4246-a6fd-60e7aa5bc6ab" containerName="oauth-openshift" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.347667 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="8576908a-678d-4f43-a530-4686764fd2cf" containerName="installer" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.348108 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.353305 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.354010 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.354951 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.355324 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.356382 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.356410 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.356837 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.356961 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.357267 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.357598 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.358216 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.358615 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.369606 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.374466 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.390442 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.404150 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 
10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.410406 5014 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.465367 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.465804 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.465972 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-service-ca\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.466117 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2794b522-2d64-4c40-9475-965e27634e7e-audit-dir\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.466421 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.466658 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbs6b\" (UniqueName: \"kubernetes.io/projected/2794b522-2d64-4c40-9475-965e27634e7e-kube-api-access-wbs6b\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.466809 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-router-certs\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.467217 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.467386 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-template-login\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.467529 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-session\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.467661 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-audit-policies\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.467830 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.467991 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-template-error\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.468123 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.533080 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570130 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-audit-policies\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " 
pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570573 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570658 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-template-error\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570689 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570740 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570771 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570810 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-service-ca\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570845 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2794b522-2d64-4c40-9475-965e27634e7e-audit-dir\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570882 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " 
pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570920 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbs6b\" (UniqueName: \"kubernetes.io/projected/2794b522-2d64-4c40-9475-965e27634e7e-kube-api-access-wbs6b\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.570967 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-router-certs\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.571010 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.571052 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-template-login\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.571093 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-session\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.571828 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-audit-policies\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.571966 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2794b522-2d64-4c40-9475-965e27634e7e-audit-dir\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.572504 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.572864 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-service-ca\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.573012 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.579965 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.580427 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-router-certs\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.580619 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.580780 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-template-error\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.581759 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.582951 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-user-template-login\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.584973 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-session\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.587853 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2794b522-2d64-4c40-9475-965e27634e7e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.596109 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbs6b\" (UniqueName: \"kubernetes.io/projected/2794b522-2d64-4c40-9475-965e27634e7e-kube-api-access-wbs6b\") pod \"oauth-openshift-8f56ccf5-xdnql\" (UID: \"2794b522-2d64-4c40-9475-965e27634e7e\") " pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.636435 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.655354 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.669623 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.672483 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.798560 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.805343 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.824577 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.826383 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.904646 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 10:52:21 crc kubenswrapper[5014]: I1205 10:52:21.968663 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.080890 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.137079 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.261544 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-authentication/oauth-openshift-8f56ccf5-xdnql"] Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.370367 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.426024 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.450136 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.463569 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.478578 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.482569 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.494621 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.508176 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.528253 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.633460 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.689612 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.693441 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.698010 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.699428 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-8f56ccf5-xdnql"] Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.720489 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.737829 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.776406 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.794896 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.797072 5014 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.873808 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.941209 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 10:52:22 crc kubenswrapper[5014]: I1205 10:52:22.980342 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.016991 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" event={"ID":"2794b522-2d64-4c40-9475-965e27634e7e","Type":"ContainerStarted","Data":"8a21d1ff33ac232063451233f0b1b602e101e32e76fcfe2e2cdf1947f78bb505"} Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.017039 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" event={"ID":"2794b522-2d64-4c40-9475-965e27634e7e","Type":"ContainerStarted","Data":"969c4dee2a20b837e573adb3090f2d9db59c1143ef207cda48bdf40b18db939b"} Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.017536 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.037489 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" podStartSLOduration=44.037475857 podStartE2EDuration="44.037475857s" podCreationTimestamp="2025-12-05 10:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:52:23.03558175 +0000 UTC m=+269.983699484" watchObservedRunningTime="2025-12-05 10:52:23.037475857 +0000 UTC m=+269.985593561" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.071695 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.099015 5014 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.119011 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.163390 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.172970 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.187693 5014 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.235516 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.325028 5014 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-service-ca"/"signing-key" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.380258 5014 patch_prober.go:28] interesting pod/oauth-openshift-8f56ccf5-xdnql container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.56:6443/healthz\": read tcp 10.217.0.2:56974->10.217.0.56:6443: read: connection reset by peer" start-of-body= Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.380377 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" podUID="2794b522-2d64-4c40-9475-965e27634e7e" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.56:6443/healthz\": read tcp 10.217.0.2:56974->10.217.0.56:6443: read: connection reset by peer" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.590247 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.629828 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.841931 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.965902 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 10:52:23 crc kubenswrapper[5014]: I1205 10:52:23.999912 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.022499 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-8f56ccf5-xdnql_2794b522-2d64-4c40-9475-965e27634e7e/oauth-openshift/0.log" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.022538 5014 generic.go:334] "Generic (PLEG): container finished" podID="2794b522-2d64-4c40-9475-965e27634e7e" containerID="8a21d1ff33ac232063451233f0b1b602e101e32e76fcfe2e2cdf1947f78bb505" exitCode=255 Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.022565 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" event={"ID":"2794b522-2d64-4c40-9475-965e27634e7e","Type":"ContainerDied","Data":"8a21d1ff33ac232063451233f0b1b602e101e32e76fcfe2e2cdf1947f78bb505"} Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.023005 5014 scope.go:117] "RemoveContainer" containerID="8a21d1ff33ac232063451233f0b1b602e101e32e76fcfe2e2cdf1947f78bb505" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.036947 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.066610 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.114433 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.199748 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 
05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.224121 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.238826 5014 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.262597 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.307389 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.309764 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.330733 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.495651 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.596248 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.603086 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.608352 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.625640 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.639299 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.720672 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.720938 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.737522 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.750066 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.862345 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.896724 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 10:52:24 crc kubenswrapper[5014]: I1205 10:52:24.950846 5014 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 
10:52:25.001053 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.036669 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-8f56ccf5-xdnql_2794b522-2d64-4c40-9475-965e27634e7e/oauth-openshift/1.log" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.039082 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-8f56ccf5-xdnql_2794b522-2d64-4c40-9475-965e27634e7e/oauth-openshift/0.log" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.039171 5014 generic.go:334] "Generic (PLEG): container finished" podID="2794b522-2d64-4c40-9475-965e27634e7e" containerID="ab6bfe31903898c2e16313b503fa2565b0e59aaec236252e099eae80a6975a6a" exitCode=255 Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.039225 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" event={"ID":"2794b522-2d64-4c40-9475-965e27634e7e","Type":"ContainerDied","Data":"ab6bfe31903898c2e16313b503fa2565b0e59aaec236252e099eae80a6975a6a"} Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.039302 5014 scope.go:117] "RemoveContainer" containerID="8a21d1ff33ac232063451233f0b1b602e101e32e76fcfe2e2cdf1947f78bb505" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.040116 5014 scope.go:117] "RemoveContainer" containerID="ab6bfe31903898c2e16313b503fa2565b0e59aaec236252e099eae80a6975a6a" Dec 05 10:52:25 crc kubenswrapper[5014]: E1205 10:52:25.040626 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-8f56ccf5-xdnql_openshift-authentication(2794b522-2d64-4c40-9475-965e27634e7e)\"" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" podUID="2794b522-2d64-4c40-9475-965e27634e7e" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.068954 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.097427 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.133442 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.155119 5014 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.155474 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47" gracePeriod=5 Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.301223 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.410451 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 10:52:25 crc 
kubenswrapper[5014]: I1205 10:52:25.459036 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.556129 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.774083 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 10:52:25 crc kubenswrapper[5014]: I1205 10:52:25.800595 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.022637 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.036185 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.045518 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-8f56ccf5-xdnql_2794b522-2d64-4c40-9475-965e27634e7e/oauth-openshift/1.log" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.046048 5014 scope.go:117] "RemoveContainer" containerID="ab6bfe31903898c2e16313b503fa2565b0e59aaec236252e099eae80a6975a6a" Dec 05 10:52:26 crc kubenswrapper[5014]: E1205 10:52:26.046296 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-8f56ccf5-xdnql_openshift-authentication(2794b522-2d64-4c40-9475-965e27634e7e)\"" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" podUID="2794b522-2d64-4c40-9475-965e27634e7e" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.056036 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.095403 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.186413 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.200821 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.205965 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.215751 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.233488 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.282296 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" 
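The two ContainerDied events above (exit code 255 at 10:52:24 and again at 10:52:25) are what drive the CrashLoopBackOff errors that follow: kubelet refuses to restart the oauth-openshift container until a backoff interval has elapsed. As a minimal sketch (not kubelet source; it assumes kubelet's documented defaults of a 10s initial delay, doubling per failed restart, capped at 5 minutes), the delay schedule behind messages like "back-off 10s restarting failed container" grows like this:

// Hypothetical illustration, not kubelet code: how the restart delays
// behind "back-off 10s restarting failed container ..." grow, assuming
// kubelet's default crash-loop backoff (10s initial, doubling, 5m cap).
package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initial = 10 * time.Second
		max     = 5 * time.Minute
	)
	delay := initial
	for restart := 1; restart <= 8; restart++ {
		fmt.Printf("restart %d: wait %v\n", restart, delay)
		delay *= 2 // kubelet doubles the back-off after each failed restart
		if delay > max {
			delay = max // capped at 5 minutes
		}
	}
}

This prints 10s, 20s, 40s, 1m20s, 2m40s, then 5m thereafter, which is consistent with the restart above finally going through at 10:52:43, once the 10s back-off from the second failure had expired.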
Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.386679 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.449347 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.456231 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.697132 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.705364 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.738444 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.742871 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.761973 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.913838 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.959386 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 10:52:26 crc kubenswrapper[5014]: I1205 10:52:26.999597 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 10:52:27 crc kubenswrapper[5014]: I1205 10:52:27.017788 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 10:52:27 crc kubenswrapper[5014]: I1205 10:52:27.292813 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 10:52:27 crc kubenswrapper[5014]: I1205 10:52:27.546137 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 10:52:27 crc kubenswrapper[5014]: I1205 10:52:27.547122 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 10:52:27 crc kubenswrapper[5014]: I1205 10:52:27.724532 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 10:52:28 crc kubenswrapper[5014]: I1205 10:52:28.055488 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 10:52:28 crc kubenswrapper[5014]: I1205 10:52:28.721381 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 10:52:28 crc kubenswrapper[5014]: I1205 10:52:28.967188 5014 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 10:52:29 crc kubenswrapper[5014]: I1205 10:52:29.029908 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 10:52:29 crc kubenswrapper[5014]: I1205 10:52:29.333408 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 10:52:30 crc kubenswrapper[5014]: I1205 10:52:30.425776 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 10:52:30 crc kubenswrapper[5014]: I1205 10:52:30.969833 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 10:52:30 crc kubenswrapper[5014]: I1205 10:52:30.969903 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.075188 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.075256 5014 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47" exitCode=137 Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.075326 5014 scope.go:117] "RemoveContainer" containerID="64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.075526 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.099300 5014 scope.go:117] "RemoveContainer" containerID="64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47" Dec 05 10:52:31 crc kubenswrapper[5014]: E1205 10:52:31.099669 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47\": container with ID starting with 64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47 not found: ID does not exist" containerID="64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.099717 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47"} err="failed to get container status \"64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47\": rpc error: code = NotFound desc = could not find container \"64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47\": container with ID starting with 64735f6aaa898f40e7c737fb50bbaac2e23c8c018f4c19248c8bafbc78874d47 not found: ID does not exist" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146123 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146205 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146330 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146329 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146368 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146398 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146407 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146428 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146533 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146670 5014 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146688 5014 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146702 5014 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.146713 5014 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.158394 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.247937 5014 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.327447 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.670587 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.670680 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:31 crc kubenswrapper[5014]: I1205 10:52:31.672414 5014 scope.go:117] "RemoveContainer" containerID="ab6bfe31903898c2e16313b503fa2565b0e59aaec236252e099eae80a6975a6a" Dec 05 10:52:31 crc kubenswrapper[5014]: E1205 10:52:31.672871 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-8f56ccf5-xdnql_openshift-authentication(2794b522-2d64-4c40-9475-965e27634e7e)\"" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" podUID="2794b522-2d64-4c40-9475-965e27634e7e" Dec 05 10:52:43 crc kubenswrapper[5014]: I1205 10:52:43.324470 5014 scope.go:117] "RemoveContainer" containerID="ab6bfe31903898c2e16313b503fa2565b0e59aaec236252e099eae80a6975a6a" Dec 05 10:52:44 crc kubenswrapper[5014]: I1205 10:52:44.164189 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-8f56ccf5-xdnql_2794b522-2d64-4c40-9475-965e27634e7e/oauth-openshift/1.log" Dec 05 10:52:44 crc kubenswrapper[5014]: I1205 10:52:44.164906 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" event={"ID":"2794b522-2d64-4c40-9475-965e27634e7e","Type":"ContainerStarted","Data":"2bc3df7ed9720c5d256cf9c5c66c215c607319931371ec472daefa478e00184e"} Dec 05 10:52:44 crc kubenswrapper[5014]: I1205 10:52:44.165444 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:44 crc kubenswrapper[5014]: I1205 10:52:44.257783 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-8f56ccf5-xdnql" Dec 05 10:52:47 crc kubenswrapper[5014]: I1205 10:52:47.384055 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 10:52:48 crc kubenswrapper[5014]: I1205 10:52:48.189506 5014 generic.go:334] "Generic (PLEG): container finished" podID="6d447c3b-5da9-443c-aeff-aa202692a222" containerID="be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826" exitCode=0 Dec 05 10:52:48 crc kubenswrapper[5014]: I1205 10:52:48.189875 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" event={"ID":"6d447c3b-5da9-443c-aeff-aa202692a222","Type":"ContainerDied","Data":"be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826"} Dec 05 10:52:48 
crc kubenswrapper[5014]: I1205 10:52:48.190659 5014 scope.go:117] "RemoveContainer" containerID="be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826" Dec 05 10:52:49 crc kubenswrapper[5014]: I1205 10:52:49.199173 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" event={"ID":"6d447c3b-5da9-443c-aeff-aa202692a222","Type":"ContainerStarted","Data":"2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f"} Dec 05 10:52:49 crc kubenswrapper[5014]: I1205 10:52:49.199834 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:52:49 crc kubenswrapper[5014]: I1205 10:52:49.203463 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:52:53 crc kubenswrapper[5014]: I1205 10:52:53.182670 5014 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.355426 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5wnsv"] Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.358692 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" podUID="6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" containerName="controller-manager" containerID="cri-o://649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580" gracePeriod=30 Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.444389 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m"] Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.444617 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" podUID="61134bd8-7840-4ba7-8ec8-02e41ed425cb" containerName="route-controller-manager" containerID="cri-o://76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06" gracePeriod=30 Dec 05 10:52:56 crc kubenswrapper[5014]: E1205 10:52:56.599872 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61134bd8_7840_4ba7_8ec8_02e41ed425cb.slice/crio-conmon-76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61134bd8_7840_4ba7_8ec8_02e41ed425cb.slice/crio-76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06.scope\": RecentStats: unable to find data in memory cache]" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.779697 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.813529 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.886456 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-config\") pod \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.886542 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-serving-cert\") pod \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.886649 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-client-ca\") pod \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.886693 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-client-ca\") pod \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.886719 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-config\") pod \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.886759 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zfzv\" (UniqueName: \"kubernetes.io/projected/61134bd8-7840-4ba7-8ec8-02e41ed425cb-kube-api-access-5zfzv\") pod \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.886789 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-proxy-ca-bundles\") pod \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.886807 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znthd\" (UniqueName: \"kubernetes.io/projected/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-kube-api-access-znthd\") pod \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\" (UID: \"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb\") " Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.886836 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61134bd8-7840-4ba7-8ec8-02e41ed425cb-serving-cert\") pod \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\" (UID: \"61134bd8-7840-4ba7-8ec8-02e41ed425cb\") " Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.887490 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod 
"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" (UID: "6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.887607 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-config" (OuterVolumeSpecName: "config") pod "6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" (UID: "6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.887899 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-config" (OuterVolumeSpecName: "config") pod "61134bd8-7840-4ba7-8ec8-02e41ed425cb" (UID: "61134bd8-7840-4ba7-8ec8-02e41ed425cb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.887935 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-client-ca" (OuterVolumeSpecName: "client-ca") pod "61134bd8-7840-4ba7-8ec8-02e41ed425cb" (UID: "61134bd8-7840-4ba7-8ec8-02e41ed425cb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.888212 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-client-ca" (OuterVolumeSpecName: "client-ca") pod "6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" (UID: "6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.891979 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" (UID: "6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.892094 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61134bd8-7840-4ba7-8ec8-02e41ed425cb-kube-api-access-5zfzv" (OuterVolumeSpecName: "kube-api-access-5zfzv") pod "61134bd8-7840-4ba7-8ec8-02e41ed425cb" (UID: "61134bd8-7840-4ba7-8ec8-02e41ed425cb"). InnerVolumeSpecName "kube-api-access-5zfzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.892660 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61134bd8-7840-4ba7-8ec8-02e41ed425cb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "61134bd8-7840-4ba7-8ec8-02e41ed425cb" (UID: "61134bd8-7840-4ba7-8ec8-02e41ed425cb"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.892727 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-kube-api-access-znthd" (OuterVolumeSpecName: "kube-api-access-znthd") pod "6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" (UID: "6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb"). InnerVolumeSpecName "kube-api-access-znthd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.988129 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zfzv\" (UniqueName: \"kubernetes.io/projected/61134bd8-7840-4ba7-8ec8-02e41ed425cb-kube-api-access-5zfzv\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.988170 5014 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.988180 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znthd\" (UniqueName: \"kubernetes.io/projected/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-kube-api-access-znthd\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.988191 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61134bd8-7840-4ba7-8ec8-02e41ed425cb-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.988202 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.988212 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.988223 5014 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.988233 5014 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:56 crc kubenswrapper[5014]: I1205 10:52:56.988242 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61134bd8-7840-4ba7-8ec8-02e41ed425cb-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.246858 5014 generic.go:334] "Generic (PLEG): container finished" podID="61134bd8-7840-4ba7-8ec8-02e41ed425cb" containerID="76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06" exitCode=0 Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.246932 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" event={"ID":"61134bd8-7840-4ba7-8ec8-02e41ed425cb","Type":"ContainerDied","Data":"76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06"} Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 
10:52:57.246941 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.246962 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m" event={"ID":"61134bd8-7840-4ba7-8ec8-02e41ed425cb","Type":"ContainerDied","Data":"e41701387477810a7a18b71773580232ace045f7b372d5c890845fa7263ccbf7"} Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.246985 5014 scope.go:117] "RemoveContainer" containerID="76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.250097 5014 generic.go:334] "Generic (PLEG): container finished" podID="6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" containerID="649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580" exitCode=0 Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.250139 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" event={"ID":"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb","Type":"ContainerDied","Data":"649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580"} Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.250180 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" event={"ID":"6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb","Type":"ContainerDied","Data":"300566bfc3f70fc0dab9aac701f2b812d894b82cf3393c421f3a95923cc85f6b"} Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.250511 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5wnsv" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.265721 5014 scope.go:117] "RemoveContainer" containerID="76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06" Dec 05 10:52:57 crc kubenswrapper[5014]: E1205 10:52:57.266743 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06\": container with ID starting with 76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06 not found: ID does not exist" containerID="76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.266852 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06"} err="failed to get container status \"76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06\": rpc error: code = NotFound desc = could not find container \"76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06\": container with ID starting with 76fc11a0d1aa55dc37fcd814eb6a0572cb162d22b020e40b7eab69f48d963c06 not found: ID does not exist" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.266910 5014 scope.go:117] "RemoveContainer" containerID="649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.286803 5014 scope.go:117] "RemoveContainer" containerID="649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580" Dec 05 10:52:57 crc kubenswrapper[5014]: E1205 10:52:57.287236 5014 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580\": container with ID starting with 649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580 not found: ID does not exist" containerID="649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.287293 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580"} err="failed to get container status \"649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580\": rpc error: code = NotFound desc = could not find container \"649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580\": container with ID starting with 649a1d8a002f0ace864f1de7de1de836118fe87f609f5e09e1b3dc0ed9563580 not found: ID does not exist" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.307463 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m"] Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.311265 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xzr9m"] Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.314318 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5wnsv"] Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.324003 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61134bd8-7840-4ba7-8ec8-02e41ed425cb" path="/var/lib/kubelet/pods/61134bd8-7840-4ba7-8ec8-02e41ed425cb/volumes" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.324519 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5wnsv"] Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.975228 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6b66565f65-w4gx4"] Dec 05 10:52:57 crc kubenswrapper[5014]: E1205 10:52:57.975935 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" containerName="controller-manager" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.975957 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" containerName="controller-manager" Dec 05 10:52:57 crc kubenswrapper[5014]: E1205 10:52:57.975991 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.976003 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 10:52:57 crc kubenswrapper[5014]: E1205 10:52:57.976021 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61134bd8-7840-4ba7-8ec8-02e41ed425cb" containerName="route-controller-manager" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.976034 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="61134bd8-7840-4ba7-8ec8-02e41ed425cb" containerName="route-controller-manager" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.976259 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
containerName="startup-monitor" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.976303 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" containerName="controller-manager" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.976327 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="61134bd8-7840-4ba7-8ec8-02e41ed425cb" containerName="route-controller-manager" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.976910 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.980410 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g"] Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.980528 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.981203 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.981313 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.981347 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.981456 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.981581 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.982513 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.983609 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.983754 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.985952 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.985997 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.986890 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.987066 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.990443 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g"] Dec 05 10:52:57 crc 
kubenswrapper[5014]: I1205 10:52:57.993171 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 10:52:57 crc kubenswrapper[5014]: I1205 10:52:57.994286 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b66565f65-w4gx4"] Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.003556 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-proxy-ca-bundles\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.003619 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-client-ca\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.003643 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxdfg\" (UniqueName: \"kubernetes.io/projected/3dc41473-9ee4-4078-941a-35bebe92cb8f-kube-api-access-pxdfg\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.003662 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-client-ca\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.003738 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc41473-9ee4-4078-941a-35bebe92cb8f-serving-cert\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.003787 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-config\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.003806 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-config\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.003865 5014 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4f04b1a-75c8-4d3c-b941-f479295fc73f-serving-cert\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.003887 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfk2c\" (UniqueName: \"kubernetes.io/projected/f4f04b1a-75c8-4d3c-b941-f479295fc73f-kube-api-access-lfk2c\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.105415 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-proxy-ca-bundles\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.105473 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-client-ca\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.105492 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxdfg\" (UniqueName: \"kubernetes.io/projected/3dc41473-9ee4-4078-941a-35bebe92cb8f-kube-api-access-pxdfg\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.105514 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-client-ca\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.105571 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc41473-9ee4-4078-941a-35bebe92cb8f-serving-cert\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.105601 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-config\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.105618 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-config\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.105649 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4f04b1a-75c8-4d3c-b941-f479295fc73f-serving-cert\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.105671 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfk2c\" (UniqueName: \"kubernetes.io/projected/f4f04b1a-75c8-4d3c-b941-f479295fc73f-kube-api-access-lfk2c\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.107498 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-client-ca\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.107516 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-client-ca\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.107578 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-config\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.107596 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-proxy-ca-bundles\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.107982 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-config\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.112256 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc41473-9ee4-4078-941a-35bebe92cb8f-serving-cert\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " 
pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.124054 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxdfg\" (UniqueName: \"kubernetes.io/projected/3dc41473-9ee4-4078-941a-35bebe92cb8f-kube-api-access-pxdfg\") pod \"route-controller-manager-7b84f6c8c-cj47g\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.125833 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4f04b1a-75c8-4d3c-b941-f479295fc73f-serving-cert\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.126019 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfk2c\" (UniqueName: \"kubernetes.io/projected/f4f04b1a-75c8-4d3c-b941-f479295fc73f-kube-api-access-lfk2c\") pod \"controller-manager-6b66565f65-w4gx4\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.320372 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:52:58 crc kubenswrapper[5014]: I1205 10:52:58.328560 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:52:59 crc kubenswrapper[5014]: I1205 10:52:59.132388 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b66565f65-w4gx4"] Dec 05 10:52:59 crc kubenswrapper[5014]: I1205 10:52:59.196371 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g"] Dec 05 10:52:59 crc kubenswrapper[5014]: W1205 10:52:59.206880 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dc41473_9ee4_4078_941a_35bebe92cb8f.slice/crio-85c64a61885ab0022cbf9df49bd0c086ce2050988eae7928c8206104d7ec59ae WatchSource:0}: Error finding container 85c64a61885ab0022cbf9df49bd0c086ce2050988eae7928c8206104d7ec59ae: Status 404 returned error can't find the container with id 85c64a61885ab0022cbf9df49bd0c086ce2050988eae7928c8206104d7ec59ae Dec 05 10:52:59 crc kubenswrapper[5014]: I1205 10:52:59.267265 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" event={"ID":"f4f04b1a-75c8-4d3c-b941-f479295fc73f","Type":"ContainerStarted","Data":"b440e5a3fdd02eb3f957de3af43591a35ef10e9fd50b91449781a772e68f642c"} Dec 05 10:52:59 crc kubenswrapper[5014]: I1205 10:52:59.268416 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" event={"ID":"3dc41473-9ee4-4078-941a-35bebe92cb8f","Type":"ContainerStarted","Data":"85c64a61885ab0022cbf9df49bd0c086ce2050988eae7928c8206104d7ec59ae"} Dec 05 10:52:59 crc kubenswrapper[5014]: I1205 10:52:59.323892 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb" path="/var/lib/kubelet/pods/6f34f27e-3e6e-43e7-9f9b-8b5d4c224efb/volumes" Dec 05 10:53:00 crc kubenswrapper[5014]: I1205 10:53:00.280348 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" event={"ID":"f4f04b1a-75c8-4d3c-b941-f479295fc73f","Type":"ContainerStarted","Data":"dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b"} Dec 05 10:53:00 crc kubenswrapper[5014]: I1205 10:53:00.280576 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:53:00 crc kubenswrapper[5014]: I1205 10:53:00.282863 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" event={"ID":"3dc41473-9ee4-4078-941a-35bebe92cb8f","Type":"ContainerStarted","Data":"3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9"} Dec 05 10:53:00 crc kubenswrapper[5014]: I1205 10:53:00.284464 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:53:00 crc kubenswrapper[5014]: I1205 10:53:00.285949 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:53:00 crc kubenswrapper[5014]: I1205 10:53:00.291574 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:53:00 crc kubenswrapper[5014]: I1205 10:53:00.303511 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" podStartSLOduration=4.303481346 podStartE2EDuration="4.303481346s" podCreationTimestamp="2025-12-05 10:52:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:53:00.299214828 +0000 UTC m=+307.247332552" watchObservedRunningTime="2025-12-05 10:53:00.303481346 +0000 UTC m=+307.251599050" Dec 05 10:53:24 crc kubenswrapper[5014]: I1205 10:53:24.430427 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" podStartSLOduration=28.430408339 podStartE2EDuration="28.430408339s" podCreationTimestamp="2025-12-05 10:52:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:53:00.336292429 +0000 UTC m=+307.284410143" watchObservedRunningTime="2025-12-05 10:53:24.430408339 +0000 UTC m=+331.378526053" Dec 05 10:53:24 crc kubenswrapper[5014]: I1205 10:53:24.435429 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6b66565f65-w4gx4"] Dec 05 10:53:24 crc kubenswrapper[5014]: I1205 10:53:24.435685 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" podUID="f4f04b1a-75c8-4d3c-b941-f479295fc73f" containerName="controller-manager" containerID="cri-o://dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b" gracePeriod=30 Dec 05 10:53:24 crc kubenswrapper[5014]: I1205 10:53:24.449732 5014 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g"] Dec 05 10:53:24 crc kubenswrapper[5014]: I1205 10:53:24.449965 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" podUID="3dc41473-9ee4-4078-941a-35bebe92cb8f" containerName="route-controller-manager" containerID="cri-o://3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9" gracePeriod=30 Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.037131 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.044054 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.142090 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxdfg\" (UniqueName: \"kubernetes.io/projected/3dc41473-9ee4-4078-941a-35bebe92cb8f-kube-api-access-pxdfg\") pod \"3dc41473-9ee4-4078-941a-35bebe92cb8f\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.142380 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4f04b1a-75c8-4d3c-b941-f479295fc73f-serving-cert\") pod \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.142452 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc41473-9ee4-4078-941a-35bebe92cb8f-serving-cert\") pod \"3dc41473-9ee4-4078-941a-35bebe92cb8f\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.142497 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-proxy-ca-bundles\") pod \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.142544 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-client-ca\") pod \"3dc41473-9ee4-4078-941a-35bebe92cb8f\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.142571 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-config\") pod \"3dc41473-9ee4-4078-941a-35bebe92cb8f\" (UID: \"3dc41473-9ee4-4078-941a-35bebe92cb8f\") " Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.143636 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f4f04b1a-75c8-4d3c-b941-f479295fc73f" (UID: "f4f04b1a-75c8-4d3c-b941-f479295fc73f"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.143650 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-client-ca" (OuterVolumeSpecName: "client-ca") pod "3dc41473-9ee4-4078-941a-35bebe92cb8f" (UID: "3dc41473-9ee4-4078-941a-35bebe92cb8f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.143709 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-config" (OuterVolumeSpecName: "config") pod "3dc41473-9ee4-4078-941a-35bebe92cb8f" (UID: "3dc41473-9ee4-4078-941a-35bebe92cb8f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.144429 5014 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.144447 5014 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.144457 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dc41473-9ee4-4078-941a-35bebe92cb8f-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.149515 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dc41473-9ee4-4078-941a-35bebe92cb8f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3dc41473-9ee4-4078-941a-35bebe92cb8f" (UID: "3dc41473-9ee4-4078-941a-35bebe92cb8f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.150019 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f04b1a-75c8-4d3c-b941-f479295fc73f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f4f04b1a-75c8-4d3c-b941-f479295fc73f" (UID: "f4f04b1a-75c8-4d3c-b941-f479295fc73f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.150080 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dc41473-9ee4-4078-941a-35bebe92cb8f-kube-api-access-pxdfg" (OuterVolumeSpecName: "kube-api-access-pxdfg") pod "3dc41473-9ee4-4078-941a-35bebe92cb8f" (UID: "3dc41473-9ee4-4078-941a-35bebe92cb8f"). InnerVolumeSpecName "kube-api-access-pxdfg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.245192 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfk2c\" (UniqueName: \"kubernetes.io/projected/f4f04b1a-75c8-4d3c-b941-f479295fc73f-kube-api-access-lfk2c\") pod \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.245312 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-client-ca\") pod \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.245420 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-config\") pod \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\" (UID: \"f4f04b1a-75c8-4d3c-b941-f479295fc73f\") " Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.245805 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-client-ca" (OuterVolumeSpecName: "client-ca") pod "f4f04b1a-75c8-4d3c-b941-f479295fc73f" (UID: "f4f04b1a-75c8-4d3c-b941-f479295fc73f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.246635 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-config" (OuterVolumeSpecName: "config") pod "f4f04b1a-75c8-4d3c-b941-f479295fc73f" (UID: "f4f04b1a-75c8-4d3c-b941-f479295fc73f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.246954 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.247004 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxdfg\" (UniqueName: \"kubernetes.io/projected/3dc41473-9ee4-4078-941a-35bebe92cb8f-kube-api-access-pxdfg\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.247038 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4f04b1a-75c8-4d3c-b941-f479295fc73f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.247067 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3dc41473-9ee4-4078-941a-35bebe92cb8f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.247093 5014 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4f04b1a-75c8-4d3c-b941-f479295fc73f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.247905 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4f04b1a-75c8-4d3c-b941-f479295fc73f-kube-api-access-lfk2c" (OuterVolumeSpecName: "kube-api-access-lfk2c") pod "f4f04b1a-75c8-4d3c-b941-f479295fc73f" (UID: "f4f04b1a-75c8-4d3c-b941-f479295fc73f"). InnerVolumeSpecName "kube-api-access-lfk2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.348981 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfk2c\" (UniqueName: \"kubernetes.io/projected/f4f04b1a-75c8-4d3c-b941-f479295fc73f-kube-api-access-lfk2c\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.430940 5014 generic.go:334] "Generic (PLEG): container finished" podID="f4f04b1a-75c8-4d3c-b941-f479295fc73f" containerID="dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b" exitCode=0 Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.431110 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.431175 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" event={"ID":"f4f04b1a-75c8-4d3c-b941-f479295fc73f","Type":"ContainerDied","Data":"dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b"} Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.431257 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b66565f65-w4gx4" event={"ID":"f4f04b1a-75c8-4d3c-b941-f479295fc73f","Type":"ContainerDied","Data":"b440e5a3fdd02eb3f957de3af43591a35ef10e9fd50b91449781a772e68f642c"} Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.431332 5014 scope.go:117] "RemoveContainer" containerID="dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.436732 5014 generic.go:334] "Generic (PLEG): container finished" podID="3dc41473-9ee4-4078-941a-35bebe92cb8f" containerID="3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9" exitCode=0 Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.436799 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" event={"ID":"3dc41473-9ee4-4078-941a-35bebe92cb8f","Type":"ContainerDied","Data":"3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9"} Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.436839 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.436850 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g" event={"ID":"3dc41473-9ee4-4078-941a-35bebe92cb8f","Type":"ContainerDied","Data":"85c64a61885ab0022cbf9df49bd0c086ce2050988eae7928c8206104d7ec59ae"} Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.457841 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g"] Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.458847 5014 scope.go:117] "RemoveContainer" containerID="dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b" Dec 05 10:53:25 crc kubenswrapper[5014]: E1205 10:53:25.459710 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b\": container with ID starting with dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b not found: ID does not exist" containerID="dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.459743 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b"} err="failed to get container status \"dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b\": rpc error: code = NotFound desc = could not find container \"dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b\": container with ID starting with dd3d9a5d45b63b581021f68d6b61007e58dac9f9600f3ec8ab0e90081b8d6e2b not found: ID 
does not exist" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.459768 5014 scope.go:117] "RemoveContainer" containerID="3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.464213 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b84f6c8c-cj47g"] Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.468861 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6b66565f65-w4gx4"] Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.472908 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6b66565f65-w4gx4"] Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.478208 5014 scope.go:117] "RemoveContainer" containerID="3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9" Dec 05 10:53:25 crc kubenswrapper[5014]: E1205 10:53:25.478673 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9\": container with ID starting with 3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9 not found: ID does not exist" containerID="3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9" Dec 05 10:53:25 crc kubenswrapper[5014]: I1205 10:53:25.478736 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9"} err="failed to get container status \"3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9\": rpc error: code = NotFound desc = could not find container \"3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9\": container with ID starting with 3a6ac233d43d6bf0be4b45400515eb88be909f80cc5df1553cee2f7e4252d8c9 not found: ID does not exist" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.016629 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn"] Dec 05 10:53:26 crc kubenswrapper[5014]: E1205 10:53:26.017084 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc41473-9ee4-4078-941a-35bebe92cb8f" containerName="route-controller-manager" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.017107 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc41473-9ee4-4078-941a-35bebe92cb8f" containerName="route-controller-manager" Dec 05 10:53:26 crc kubenswrapper[5014]: E1205 10:53:26.017136 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4f04b1a-75c8-4d3c-b941-f479295fc73f" containerName="controller-manager" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.017151 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4f04b1a-75c8-4d3c-b941-f479295fc73f" containerName="controller-manager" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.017371 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4f04b1a-75c8-4d3c-b941-f479295fc73f" containerName="controller-manager" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.017398 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dc41473-9ee4-4078-941a-35bebe92cb8f" containerName="route-controller-manager" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.018078 5014 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.019504 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f4984c5bc-m68g7"] Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.020257 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.020325 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.021221 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.021263 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.021584 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.021756 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.024132 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.024609 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.025185 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.025623 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.025646 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.025902 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.026321 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.033189 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f4984c5bc-m68g7"] Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.034620 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.036232 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn"] Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.162738 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br4mn\" (UniqueName: 
\"kubernetes.io/projected/c3517b4a-d889-41f2-8f43-e4d25da72e9d-kube-api-access-br4mn\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.162783 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-config\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.162819 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-client-ca\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.162838 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3517b4a-d889-41f2-8f43-e4d25da72e9d-serving-cert\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.163362 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3517b4a-d889-41f2-8f43-e4d25da72e9d-config\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.163430 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-serving-cert\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.163495 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-proxy-ca-bundles\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.163523 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3517b4a-d889-41f2-8f43-e4d25da72e9d-client-ca\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.163614 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfc9n\" (UniqueName: 
\"kubernetes.io/projected/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-kube-api-access-vfc9n\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.265033 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-proxy-ca-bundles\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.265101 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3517b4a-d889-41f2-8f43-e4d25da72e9d-client-ca\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.265130 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfc9n\" (UniqueName: \"kubernetes.io/projected/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-kube-api-access-vfc9n\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.265169 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br4mn\" (UniqueName: \"kubernetes.io/projected/c3517b4a-d889-41f2-8f43-e4d25da72e9d-kube-api-access-br4mn\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.265198 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-config\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.265228 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3517b4a-d889-41f2-8f43-e4d25da72e9d-serving-cert\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.265251 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-client-ca\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.265304 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3517b4a-d889-41f2-8f43-e4d25da72e9d-config\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: 
\"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.265356 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-serving-cert\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.266189 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3517b4a-d889-41f2-8f43-e4d25da72e9d-client-ca\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.266249 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-client-ca\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.266390 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-proxy-ca-bundles\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.266565 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3517b4a-d889-41f2-8f43-e4d25da72e9d-config\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.266682 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-config\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.272986 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3517b4a-d889-41f2-8f43-e4d25da72e9d-serving-cert\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.273011 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-serving-cert\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.301892 5014 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-br4mn\" (UniqueName: \"kubernetes.io/projected/c3517b4a-d889-41f2-8f43-e4d25da72e9d-kube-api-access-br4mn\") pod \"route-controller-manager-5f74d98658-892hn\" (UID: \"c3517b4a-d889-41f2-8f43-e4d25da72e9d\") " pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.308183 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfc9n\" (UniqueName: \"kubernetes.io/projected/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-kube-api-access-vfc9n\") pod \"controller-manager-7f4984c5bc-m68g7\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.336246 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.344727 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.556073 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f4984c5bc-m68g7"] Dec 05 10:53:26 crc kubenswrapper[5014]: I1205 10:53:26.597451 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn"] Dec 05 10:53:26 crc kubenswrapper[5014]: W1205 10:53:26.600721 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3517b4a_d889_41f2_8f43_e4d25da72e9d.slice/crio-76e588c05754fd34f7762d160cc4a629b3566aaf56f99871583af5d2f1dc5a49 WatchSource:0}: Error finding container 76e588c05754fd34f7762d160cc4a629b3566aaf56f99871583af5d2f1dc5a49: Status 404 returned error can't find the container with id 76e588c05754fd34f7762d160cc4a629b3566aaf56f99871583af5d2f1dc5a49 Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.325053 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dc41473-9ee4-4078-941a-35bebe92cb8f" path="/var/lib/kubelet/pods/3dc41473-9ee4-4078-941a-35bebe92cb8f/volumes" Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.326189 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4f04b1a-75c8-4d3c-b941-f479295fc73f" path="/var/lib/kubelet/pods/f4f04b1a-75c8-4d3c-b941-f479295fc73f/volumes" Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.459468 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" event={"ID":"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8","Type":"ContainerStarted","Data":"0858ad88de559824d652b3061fbaea0420909058c97f2beca1d94bd383ca00d4"} Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.459524 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" event={"ID":"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8","Type":"ContainerStarted","Data":"5f7e99a986fe0487f2ad689fd8778478de80700c5ac1f17025dd5cdd12b1caf7"} Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.459812 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 
10:53:27.462536 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" event={"ID":"c3517b4a-d889-41f2-8f43-e4d25da72e9d","Type":"ContainerStarted","Data":"7ba2936f591fffc46b070e39f22a3068f92b3e14314f817224aece6c80828124"} Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.462597 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" event={"ID":"c3517b4a-d889-41f2-8f43-e4d25da72e9d","Type":"ContainerStarted","Data":"76e588c05754fd34f7762d160cc4a629b3566aaf56f99871583af5d2f1dc5a49"} Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.462773 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.465007 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.470261 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.478035 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" podStartSLOduration=3.478019209 podStartE2EDuration="3.478019209s" podCreationTimestamp="2025-12-05 10:53:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:53:27.476968303 +0000 UTC m=+334.425086007" watchObservedRunningTime="2025-12-05 10:53:27.478019209 +0000 UTC m=+334.426136913" Dec 05 10:53:27 crc kubenswrapper[5014]: I1205 10:53:27.491379 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5f74d98658-892hn" podStartSLOduration=3.4913617 podStartE2EDuration="3.4913617s" podCreationTimestamp="2025-12-05 10:53:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:53:27.491007522 +0000 UTC m=+334.439125256" watchObservedRunningTime="2025-12-05 10:53:27.4913617 +0000 UTC m=+334.439479404" Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.355088 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f4984c5bc-m68g7"] Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.356020 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" podUID="3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" containerName="controller-manager" containerID="cri-o://0858ad88de559824d652b3061fbaea0420909058c97f2beca1d94bd383ca00d4" gracePeriod=30 Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.518696 5014 generic.go:334] "Generic (PLEG): container finished" podID="3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" containerID="0858ad88de559824d652b3061fbaea0420909058c97f2beca1d94bd383ca00d4" exitCode=0 Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.518756 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" 
event={"ID":"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8","Type":"ContainerDied","Data":"0858ad88de559824d652b3061fbaea0420909058c97f2beca1d94bd383ca00d4"} Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.874448 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.930718 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-config\") pod \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.930778 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-proxy-ca-bundles\") pod \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.930818 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-serving-cert\") pod \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.930853 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-client-ca\") pod \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.930895 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfc9n\" (UniqueName: \"kubernetes.io/projected/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-kube-api-access-vfc9n\") pod \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\" (UID: \"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8\") " Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.931757 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-client-ca" (OuterVolumeSpecName: "client-ca") pod "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" (UID: "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.931787 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" (UID: "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.932429 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-config" (OuterVolumeSpecName: "config") pod "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" (UID: "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.936239 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-kube-api-access-vfc9n" (OuterVolumeSpecName: "kube-api-access-vfc9n") pod "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" (UID: "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8"). InnerVolumeSpecName "kube-api-access-vfc9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:53:37 crc kubenswrapper[5014]: I1205 10:53:37.936871 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" (UID: "3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.032133 5014 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.032373 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfc9n\" (UniqueName: \"kubernetes.io/projected/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-kube-api-access-vfc9n\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.032409 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.032420 5014 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.032432 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.525800 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" event={"ID":"3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8","Type":"ContainerDied","Data":"5f7e99a986fe0487f2ad689fd8778478de80700c5ac1f17025dd5cdd12b1caf7"} Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.525869 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f4984c5bc-m68g7" Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.525886 5014 scope.go:117] "RemoveContainer" containerID="0858ad88de559824d652b3061fbaea0420909058c97f2beca1d94bd383ca00d4" Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.554577 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f4984c5bc-m68g7"] Dec 05 10:53:38 crc kubenswrapper[5014]: I1205 10:53:38.558480 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7f4984c5bc-m68g7"] Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.023743 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h"] Dec 05 10:53:39 crc kubenswrapper[5014]: E1205 10:53:39.024019 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" containerName="controller-manager" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.024035 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" containerName="controller-manager" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.024175 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" containerName="controller-manager" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.024691 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.027981 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.028385 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.028725 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.029295 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.030063 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.034952 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h"] Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.035397 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.038241 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.047808 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8gdh\" (UniqueName: \"kubernetes.io/projected/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-kube-api-access-q8gdh\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " 
pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.047850 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-serving-cert\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.047880 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-client-ca\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.047901 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-proxy-ca-bundles\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.047923 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-config\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.149207 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-config\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.149367 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8gdh\" (UniqueName: \"kubernetes.io/projected/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-kube-api-access-q8gdh\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.149391 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-serving-cert\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.149427 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-client-ca\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.149448 5014 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-proxy-ca-bundles\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.150429 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-client-ca\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.150754 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-proxy-ca-bundles\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.151487 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-config\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.154783 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-serving-cert\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.166238 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8gdh\" (UniqueName: \"kubernetes.io/projected/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-kube-api-access-q8gdh\") pod \"controller-manager-6f4c9485b4-wpb2h\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.324035 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8" path="/var/lib/kubelet/pods/3d0a86fb-f2f0-46e8-8b64-3b8bb9f7c8c8/volumes" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.338422 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:39 crc kubenswrapper[5014]: I1205 10:53:39.732940 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h"] Dec 05 10:53:40 crc kubenswrapper[5014]: I1205 10:53:40.541403 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" event={"ID":"dcfae809-0fee-4df1-896c-3c1ee8ec04fb","Type":"ContainerStarted","Data":"5edcb87a05ab75d8bc092bf5ba88f7ebd75baed6b697a7ebb6294523e9c7c427"} Dec 05 10:53:40 crc kubenswrapper[5014]: I1205 10:53:40.541854 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" event={"ID":"dcfae809-0fee-4df1-896c-3c1ee8ec04fb","Type":"ContainerStarted","Data":"d9ff0694dc9170f54c05225df83fbae12980e26b05fe4017bf5c75a459c5d979"} Dec 05 10:53:40 crc kubenswrapper[5014]: I1205 10:53:40.541891 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:40 crc kubenswrapper[5014]: I1205 10:53:40.547766 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:40 crc kubenswrapper[5014]: I1205 10:53:40.561223 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" podStartSLOduration=3.5612087519999998 podStartE2EDuration="3.561208752s" podCreationTimestamp="2025-12-05 10:53:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:53:40.560748962 +0000 UTC m=+347.508866696" watchObservedRunningTime="2025-12-05 10:53:40.561208752 +0000 UTC m=+347.509326456" Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.400845 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h"] Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.401420 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" podUID="dcfae809-0fee-4df1-896c-3c1ee8ec04fb" containerName="controller-manager" containerID="cri-o://5edcb87a05ab75d8bc092bf5ba88f7ebd75baed6b697a7ebb6294523e9c7c427" gracePeriod=30 Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.559925 5014 generic.go:334] "Generic (PLEG): container finished" podID="dcfae809-0fee-4df1-896c-3c1ee8ec04fb" containerID="5edcb87a05ab75d8bc092bf5ba88f7ebd75baed6b697a7ebb6294523e9c7c427" exitCode=0 Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.560022 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" event={"ID":"dcfae809-0fee-4df1-896c-3c1ee8ec04fb","Type":"ContainerDied","Data":"5edcb87a05ab75d8bc092bf5ba88f7ebd75baed6b697a7ebb6294523e9c7c427"} Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.857748 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.923406 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-proxy-ca-bundles\") pod \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.923502 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-config\") pod \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.923523 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-client-ca\") pod \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.923564 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8gdh\" (UniqueName: \"kubernetes.io/projected/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-kube-api-access-q8gdh\") pod \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.923586 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-serving-cert\") pod \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\" (UID: \"dcfae809-0fee-4df1-896c-3c1ee8ec04fb\") " Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.924149 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "dcfae809-0fee-4df1-896c-3c1ee8ec04fb" (UID: "dcfae809-0fee-4df1-896c-3c1ee8ec04fb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.924182 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-client-ca" (OuterVolumeSpecName: "client-ca") pod "dcfae809-0fee-4df1-896c-3c1ee8ec04fb" (UID: "dcfae809-0fee-4df1-896c-3c1ee8ec04fb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.924200 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-config" (OuterVolumeSpecName: "config") pod "dcfae809-0fee-4df1-896c-3c1ee8ec04fb" (UID: "dcfae809-0fee-4df1-896c-3c1ee8ec04fb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.929166 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "dcfae809-0fee-4df1-896c-3c1ee8ec04fb" (UID: "dcfae809-0fee-4df1-896c-3c1ee8ec04fb"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:53:44 crc kubenswrapper[5014]: I1205 10:53:44.932654 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-kube-api-access-q8gdh" (OuterVolumeSpecName: "kube-api-access-q8gdh") pod "dcfae809-0fee-4df1-896c-3c1ee8ec04fb" (UID: "dcfae809-0fee-4df1-896c-3c1ee8ec04fb"). InnerVolumeSpecName "kube-api-access-q8gdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.024518 5014 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.024868 5014 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.024879 5014 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.024889 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-config\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.024899 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8gdh\" (UniqueName: \"kubernetes.io/projected/dcfae809-0fee-4df1-896c-3c1ee8ec04fb-kube-api-access-q8gdh\") on node \"crc\" DevicePath \"\"" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.295482 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-tffj9"] Dec 05 10:53:45 crc kubenswrapper[5014]: E1205 10:53:45.295751 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcfae809-0fee-4df1-896c-3c1ee8ec04fb" containerName="controller-manager" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.295766 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcfae809-0fee-4df1-896c-3c1ee8ec04fb" containerName="controller-manager" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.295879 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcfae809-0fee-4df1-896c-3c1ee8ec04fb" containerName="controller-manager" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.296367 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.310548 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-tffj9"] Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.427657 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/53bcebb4-9359-4505-87bb-42945cce3c25-registry-tls\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.427710 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/53bcebb4-9359-4505-87bb-42945cce3c25-bound-sa-token\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.427757 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/53bcebb4-9359-4505-87bb-42945cce3c25-ca-trust-extracted\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.427791 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.427878 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnt5r\" (UniqueName: \"kubernetes.io/projected/53bcebb4-9359-4505-87bb-42945cce3c25-kube-api-access-xnt5r\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.427968 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/53bcebb4-9359-4505-87bb-42945cce3c25-trusted-ca\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.428006 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/53bcebb4-9359-4505-87bb-42945cce3c25-installation-pull-secrets\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.428039 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/53bcebb4-9359-4505-87bb-42945cce3c25-registry-certificates\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.450971 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.529348 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnt5r\" (UniqueName: \"kubernetes.io/projected/53bcebb4-9359-4505-87bb-42945cce3c25-kube-api-access-xnt5r\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.529418 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/53bcebb4-9359-4505-87bb-42945cce3c25-trusted-ca\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.529463 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/53bcebb4-9359-4505-87bb-42945cce3c25-installation-pull-secrets\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.529529 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/53bcebb4-9359-4505-87bb-42945cce3c25-registry-certificates\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.530550 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/53bcebb4-9359-4505-87bb-42945cce3c25-registry-tls\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.530594 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/53bcebb4-9359-4505-87bb-42945cce3c25-bound-sa-token\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.530637 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/53bcebb4-9359-4505-87bb-42945cce3c25-ca-trust-extracted\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.530924 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/53bcebb4-9359-4505-87bb-42945cce3c25-trusted-ca\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.531041 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/53bcebb4-9359-4505-87bb-42945cce3c25-ca-trust-extracted\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.532085 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/53bcebb4-9359-4505-87bb-42945cce3c25-registry-certificates\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.535714 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/53bcebb4-9359-4505-87bb-42945cce3c25-installation-pull-secrets\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.535773 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/53bcebb4-9359-4505-87bb-42945cce3c25-registry-tls\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.551471 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/53bcebb4-9359-4505-87bb-42945cce3c25-bound-sa-token\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.558126 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnt5r\" (UniqueName: \"kubernetes.io/projected/53bcebb4-9359-4505-87bb-42945cce3c25-kube-api-access-xnt5r\") pod \"image-registry-66df7c8f76-tffj9\" (UID: \"53bcebb4-9359-4505-87bb-42945cce3c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.569368 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" event={"ID":"dcfae809-0fee-4df1-896c-3c1ee8ec04fb","Type":"ContainerDied","Data":"d9ff0694dc9170f54c05225df83fbae12980e26b05fe4017bf5c75a459c5d979"} Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.569422 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.569425 5014 scope.go:117] "RemoveContainer" containerID="5edcb87a05ab75d8bc092bf5ba88f7ebd75baed6b697a7ebb6294523e9c7c427" Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.598578 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h"] Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.603483 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6f4c9485b4-wpb2h"] Dec 05 10:53:45 crc kubenswrapper[5014]: I1205 10:53:45.610241 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.035973 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2"] Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.037870 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.040614 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.040847 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.041314 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.041403 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.041478 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.042166 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.065751 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2"] Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.069908 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.091524 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-tffj9"] Dec 05 10:53:46 crc kubenswrapper[5014]: W1205 10:53:46.095828 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53bcebb4_9359_4505_87bb_42945cce3c25.slice/crio-3c47fedce916f114ed8f204d2feaaaebba9c3a8609b0f19e4dcd5fec97f24601 WatchSource:0}: Error finding container 3c47fedce916f114ed8f204d2feaaaebba9c3a8609b0f19e4dcd5fec97f24601: Status 404 returned error can't find the container with id 3c47fedce916f114ed8f204d2feaaaebba9c3a8609b0f19e4dcd5fec97f24601 Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.141029 5014 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-client-ca\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.141238 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-serving-cert\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.141317 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-proxy-ca-bundles\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.141385 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-config\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.141455 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg9sq\" (UniqueName: \"kubernetes.io/projected/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-kube-api-access-fg9sq\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.242517 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-config\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.242582 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg9sq\" (UniqueName: \"kubernetes.io/projected/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-kube-api-access-fg9sq\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.242607 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-client-ca\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.242639 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-serving-cert\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.242665 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-proxy-ca-bundles\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.244203 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-client-ca\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.244350 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-proxy-ca-bundles\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.245509 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-config\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.248703 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-serving-cert\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.262042 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg9sq\" (UniqueName: \"kubernetes.io/projected/0165ef85-952f-4fe6-9ed4-5d547c03c6fc-kube-api-access-fg9sq\") pod \"controller-manager-7f4984c5bc-9h2b2\" (UID: \"0165ef85-952f-4fe6-9ed4-5d547c03c6fc\") " pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.371108 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.575361 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" event={"ID":"53bcebb4-9359-4505-87bb-42945cce3c25","Type":"ContainerStarted","Data":"cc129d41b6cf7564bab208be67d0c3d562a69a271426da3d335f44bba3782690"} Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.575401 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" event={"ID":"53bcebb4-9359-4505-87bb-42945cce3c25","Type":"ContainerStarted","Data":"3c47fedce916f114ed8f204d2feaaaebba9c3a8609b0f19e4dcd5fec97f24601"} Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.576655 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.609743 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" podStartSLOduration=1.609717752 podStartE2EDuration="1.609717752s" podCreationTimestamp="2025-12-05 10:53:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:53:46.600425978 +0000 UTC m=+353.548543712" watchObservedRunningTime="2025-12-05 10:53:46.609717752 +0000 UTC m=+353.557835456" Dec 05 10:53:46 crc kubenswrapper[5014]: I1205 10:53:46.611059 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2"] Dec 05 10:53:47 crc kubenswrapper[5014]: I1205 10:53:47.329855 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcfae809-0fee-4df1-896c-3c1ee8ec04fb" path="/var/lib/kubelet/pods/dcfae809-0fee-4df1-896c-3c1ee8ec04fb/volumes" Dec 05 10:53:47 crc kubenswrapper[5014]: I1205 10:53:47.582101 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" event={"ID":"0165ef85-952f-4fe6-9ed4-5d547c03c6fc","Type":"ContainerStarted","Data":"e289a8cc501687e027e4906d87e32b2cdc34c0183d6d0dbea062b6dfb3a7aed6"} Dec 05 10:53:47 crc kubenswrapper[5014]: I1205 10:53:47.582481 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" event={"ID":"0165ef85-952f-4fe6-9ed4-5d547c03c6fc","Type":"ContainerStarted","Data":"7aa955e50638f2c4fe24097868f1795c705b64afc7eb23e0d70ed6c1c9ee7129"} Dec 05 10:53:47 crc kubenswrapper[5014]: I1205 10:53:47.604536 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" podStartSLOduration=3.604518776 podStartE2EDuration="3.604518776s" podCreationTimestamp="2025-12-05 10:53:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:53:47.599831293 +0000 UTC m=+354.547949017" watchObservedRunningTime="2025-12-05 10:53:47.604518776 +0000 UTC m=+354.552636480" Dec 05 10:53:48 crc kubenswrapper[5014]: I1205 10:53:48.586217 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:53:48 crc kubenswrapper[5014]: I1205 10:53:48.590917 5014 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f4984c5bc-9h2b2" Dec 05 10:54:02 crc kubenswrapper[5014]: I1205 10:54:02.937129 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 10:54:02 crc kubenswrapper[5014]: I1205 10:54:02.938021 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 10:54:05 crc kubenswrapper[5014]: I1205 10:54:05.616148 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-tffj9" Dec 05 10:54:05 crc kubenswrapper[5014]: I1205 10:54:05.672786 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2s2hb"] Dec 05 10:54:05 crc kubenswrapper[5014]: I1205 10:54:05.935418 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8v5hq"] Dec 05 10:54:05 crc kubenswrapper[5014]: I1205 10:54:05.935706 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8v5hq" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerName="registry-server" containerID="cri-o://64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872" gracePeriod=2 Dec 05 10:54:06 crc kubenswrapper[5014]: E1205 10:54:06.269124 5014 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872 is running failed: container process not found" containerID="64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 10:54:06 crc kubenswrapper[5014]: E1205 10:54:06.269667 5014 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872 is running failed: container process not found" containerID="64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 10:54:06 crc kubenswrapper[5014]: E1205 10:54:06.269917 5014 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872 is running failed: container process not found" containerID="64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 10:54:06 crc kubenswrapper[5014]: E1205 10:54:06.269959 5014 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-8v5hq" 
podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerName="registry-server" Dec 05 10:54:06 crc kubenswrapper[5014]: I1205 10:54:06.719260 5014 generic.go:334] "Generic (PLEG): container finished" podID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerID="64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872" exitCode=0 Dec 05 10:54:06 crc kubenswrapper[5014]: I1205 10:54:06.719303 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8v5hq" event={"ID":"132d8475-31f2-4d2c-90d2-7d7739cc0fea","Type":"ContainerDied","Data":"64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872"} Dec 05 10:54:06 crc kubenswrapper[5014]: I1205 10:54:06.905147 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.050726 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-utilities\") pod \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.050809 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4c9w\" (UniqueName: \"kubernetes.io/projected/132d8475-31f2-4d2c-90d2-7d7739cc0fea-kube-api-access-h4c9w\") pod \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.050940 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-catalog-content\") pod \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\" (UID: \"132d8475-31f2-4d2c-90d2-7d7739cc0fea\") " Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.052468 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-utilities" (OuterVolumeSpecName: "utilities") pod "132d8475-31f2-4d2c-90d2-7d7739cc0fea" (UID: "132d8475-31f2-4d2c-90d2-7d7739cc0fea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.057323 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/132d8475-31f2-4d2c-90d2-7d7739cc0fea-kube-api-access-h4c9w" (OuterVolumeSpecName: "kube-api-access-h4c9w") pod "132d8475-31f2-4d2c-90d2-7d7739cc0fea" (UID: "132d8475-31f2-4d2c-90d2-7d7739cc0fea"). InnerVolumeSpecName "kube-api-access-h4c9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.114786 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "132d8475-31f2-4d2c-90d2-7d7739cc0fea" (UID: "132d8475-31f2-4d2c-90d2-7d7739cc0fea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.152981 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.153024 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/132d8475-31f2-4d2c-90d2-7d7739cc0fea-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.153036 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4c9w\" (UniqueName: \"kubernetes.io/projected/132d8475-31f2-4d2c-90d2-7d7739cc0fea-kube-api-access-h4c9w\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:07 crc kubenswrapper[5014]: E1205 10:54:07.429950 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod132d8475_31f2_4d2c_90d2_7d7739cc0fea.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod132d8475_31f2_4d2c_90d2_7d7739cc0fea.slice/crio-c06edeb286e091c08506206921bf8e87a3ce5af86f1fd5267dd1c23838bf8b9c\": RecentStats: unable to find data in memory cache]" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.728434 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8v5hq" event={"ID":"132d8475-31f2-4d2c-90d2-7d7739cc0fea","Type":"ContainerDied","Data":"c06edeb286e091c08506206921bf8e87a3ce5af86f1fd5267dd1c23838bf8b9c"} Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.728561 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8v5hq" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.728656 5014 scope.go:117] "RemoveContainer" containerID="64c2f3ea2cf4019afc4bcaec495886fc7260c587f9cdd229a6429fbbd553e872" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.761360 5014 scope.go:117] "RemoveContainer" containerID="0b97f3903c49ea39905781cdaba87dbf0d9dff3f16221773ae62ebcf5e2e2a0d" Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.771322 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8v5hq"] Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.777648 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8v5hq"] Dec 05 10:54:07 crc kubenswrapper[5014]: I1205 10:54:07.780659 5014 scope.go:117] "RemoveContainer" containerID="628d4a8d7edb275095419a27d2b769793cc794f27d67c12e0b0d2e231239dcc4" Dec 05 10:54:09 crc kubenswrapper[5014]: I1205 10:54:09.328859 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" path="/var/lib/kubelet/pods/132d8475-31f2-4d2c-90d2-7d7739cc0fea/volumes" Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.785732 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2pv7s"] Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.786738 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2pv7s" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" containerName="registry-server" containerID="cri-o://f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6" gracePeriod=30 Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.798334 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cn8nz"] Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.798614 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cn8nz" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerName="registry-server" containerID="cri-o://eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142" gracePeriod=30 Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.815387 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nphwc"] Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.815799 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" containerID="cri-o://2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f" gracePeriod=30 Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.823399 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pxrjf"] Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.823669 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pxrjf" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerName="registry-server" containerID="cri-o://baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f" gracePeriod=30 Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.831406 5014 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-blg9z"] Dec 05 10:54:13 crc kubenswrapper[5014]: E1205 10:54:13.831724 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerName="registry-server" Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.831748 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerName="registry-server" Dec 05 10:54:13 crc kubenswrapper[5014]: E1205 10:54:13.831763 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerName="extract-content" Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.831773 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerName="extract-content" Dec 05 10:54:13 crc kubenswrapper[5014]: E1205 10:54:13.831785 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerName="extract-utilities" Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.831796 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerName="extract-utilities" Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.831939 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="132d8475-31f2-4d2c-90d2-7d7739cc0fea" containerName="registry-server" Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.832430 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.843889 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d46f6"] Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.844153 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d46f6" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerName="registry-server" containerID="cri-o://ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a" gracePeriod=30 Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.851658 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-blg9z"] Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.949081 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvfln\" (UniqueName: \"kubernetes.io/projected/6324df91-5676-4d76-969c-ed24a6f6d7bf-kube-api-access-lvfln\") pod \"marketplace-operator-79b997595-blg9z\" (UID: \"6324df91-5676-4d76-969c-ed24a6f6d7bf\") " pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.949158 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6324df91-5676-4d76-969c-ed24a6f6d7bf-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-blg9z\" (UID: \"6324df91-5676-4d76-969c-ed24a6f6d7bf\") " pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:13 crc kubenswrapper[5014]: I1205 10:54:13.949183 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/6324df91-5676-4d76-969c-ed24a6f6d7bf-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-blg9z\" (UID: \"6324df91-5676-4d76-969c-ed24a6f6d7bf\") " pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.050700 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvfln\" (UniqueName: \"kubernetes.io/projected/6324df91-5676-4d76-969c-ed24a6f6d7bf-kube-api-access-lvfln\") pod \"marketplace-operator-79b997595-blg9z\" (UID: \"6324df91-5676-4d76-969c-ed24a6f6d7bf\") " pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.050771 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6324df91-5676-4d76-969c-ed24a6f6d7bf-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-blg9z\" (UID: \"6324df91-5676-4d76-969c-ed24a6f6d7bf\") " pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.050807 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6324df91-5676-4d76-969c-ed24a6f6d7bf-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-blg9z\" (UID: \"6324df91-5676-4d76-969c-ed24a6f6d7bf\") " pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.053941 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6324df91-5676-4d76-969c-ed24a6f6d7bf-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-blg9z\" (UID: \"6324df91-5676-4d76-969c-ed24a6f6d7bf\") " pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.057284 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6324df91-5676-4d76-969c-ed24a6f6d7bf-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-blg9z\" (UID: \"6324df91-5676-4d76-969c-ed24a6f6d7bf\") " pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.069678 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvfln\" (UniqueName: \"kubernetes.io/projected/6324df91-5676-4d76-969c-ed24a6f6d7bf-kube-api-access-lvfln\") pod \"marketplace-operator-79b997595-blg9z\" (UID: \"6324df91-5676-4d76-969c-ed24a6f6d7bf\") " pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.246066 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.296229 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.458531 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b584g\" (UniqueName: \"kubernetes.io/projected/a16883f7-65de-4e01-a7e3-adb349c31ea0-kube-api-access-b584g\") pod \"a16883f7-65de-4e01-a7e3-adb349c31ea0\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.458580 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-catalog-content\") pod \"a16883f7-65de-4e01-a7e3-adb349c31ea0\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.458629 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-utilities\") pod \"a16883f7-65de-4e01-a7e3-adb349c31ea0\" (UID: \"a16883f7-65de-4e01-a7e3-adb349c31ea0\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.462584 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-utilities" (OuterVolumeSpecName: "utilities") pod "a16883f7-65de-4e01-a7e3-adb349c31ea0" (UID: "a16883f7-65de-4e01-a7e3-adb349c31ea0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.467935 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a16883f7-65de-4e01-a7e3-adb349c31ea0-kube-api-access-b584g" (OuterVolumeSpecName: "kube-api-access-b584g") pod "a16883f7-65de-4e01-a7e3-adb349c31ea0" (UID: "a16883f7-65de-4e01-a7e3-adb349c31ea0"). InnerVolumeSpecName "kube-api-access-b584g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.520425 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.520427 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a16883f7-65de-4e01-a7e3-adb349c31ea0" (UID: "a16883f7-65de-4e01-a7e3-adb349c31ea0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.545657 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.546020 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.560915 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b584g\" (UniqueName: \"kubernetes.io/projected/a16883f7-65de-4e01-a7e3-adb349c31ea0-kube-api-access-b584g\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.560957 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.560974 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16883f7-65de-4e01-a7e3-adb349c31ea0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.568234 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662471 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-utilities\") pod \"716f3dfe-fa59-450b-ba26-31a7a26763a3\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662530 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-trusted-ca\") pod \"6d447c3b-5da9-443c-aeff-aa202692a222\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662559 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc5xm\" (UniqueName: \"kubernetes.io/projected/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-kube-api-access-vc5xm\") pod \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662591 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-catalog-content\") pod \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662648 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-catalog-content\") pod \"716f3dfe-fa59-450b-ba26-31a7a26763a3\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662674 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-utilities\") pod \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\" (UID: \"f35c6bb2-9a29-41b5-bfeb-39e8848b095f\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662698 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-catalog-content\") pod \"70885ea0-025c-45b1-9999-7a44c28312ba\" (UID: 
\"70885ea0-025c-45b1-9999-7a44c28312ba\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662717 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghsq7\" (UniqueName: \"kubernetes.io/projected/716f3dfe-fa59-450b-ba26-31a7a26763a3-kube-api-access-ghsq7\") pod \"716f3dfe-fa59-450b-ba26-31a7a26763a3\" (UID: \"716f3dfe-fa59-450b-ba26-31a7a26763a3\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662738 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrrs9\" (UniqueName: \"kubernetes.io/projected/70885ea0-025c-45b1-9999-7a44c28312ba-kube-api-access-vrrs9\") pod \"70885ea0-025c-45b1-9999-7a44c28312ba\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662771 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-utilities\") pod \"70885ea0-025c-45b1-9999-7a44c28312ba\" (UID: \"70885ea0-025c-45b1-9999-7a44c28312ba\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662820 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v8kt\" (UniqueName: \"kubernetes.io/projected/6d447c3b-5da9-443c-aeff-aa202692a222-kube-api-access-8v8kt\") pod \"6d447c3b-5da9-443c-aeff-aa202692a222\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.662872 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-operator-metrics\") pod \"6d447c3b-5da9-443c-aeff-aa202692a222\" (UID: \"6d447c3b-5da9-443c-aeff-aa202692a222\") " Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.663297 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-utilities" (OuterVolumeSpecName: "utilities") pod "716f3dfe-fa59-450b-ba26-31a7a26763a3" (UID: "716f3dfe-fa59-450b-ba26-31a7a26763a3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.663307 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "6d447c3b-5da9-443c-aeff-aa202692a222" (UID: "6d447c3b-5da9-443c-aeff-aa202692a222"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.667346 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-utilities" (OuterVolumeSpecName: "utilities") pod "70885ea0-025c-45b1-9999-7a44c28312ba" (UID: "70885ea0-025c-45b1-9999-7a44c28312ba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.667591 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-utilities" (OuterVolumeSpecName: "utilities") pod "f35c6bb2-9a29-41b5-bfeb-39e8848b095f" (UID: "f35c6bb2-9a29-41b5-bfeb-39e8848b095f"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.668230 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70885ea0-025c-45b1-9999-7a44c28312ba-kube-api-access-vrrs9" (OuterVolumeSpecName: "kube-api-access-vrrs9") pod "70885ea0-025c-45b1-9999-7a44c28312ba" (UID: "70885ea0-025c-45b1-9999-7a44c28312ba"). InnerVolumeSpecName "kube-api-access-vrrs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.668882 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "6d447c3b-5da9-443c-aeff-aa202692a222" (UID: "6d447c3b-5da9-443c-aeff-aa202692a222"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.668960 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/716f3dfe-fa59-450b-ba26-31a7a26763a3-kube-api-access-ghsq7" (OuterVolumeSpecName: "kube-api-access-ghsq7") pod "716f3dfe-fa59-450b-ba26-31a7a26763a3" (UID: "716f3dfe-fa59-450b-ba26-31a7a26763a3"). InnerVolumeSpecName "kube-api-access-ghsq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.670872 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d447c3b-5da9-443c-aeff-aa202692a222-kube-api-access-8v8kt" (OuterVolumeSpecName: "kube-api-access-8v8kt") pod "6d447c3b-5da9-443c-aeff-aa202692a222" (UID: "6d447c3b-5da9-443c-aeff-aa202692a222"). InnerVolumeSpecName "kube-api-access-8v8kt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.671434 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-kube-api-access-vc5xm" (OuterVolumeSpecName: "kube-api-access-vc5xm") pod "f35c6bb2-9a29-41b5-bfeb-39e8848b095f" (UID: "f35c6bb2-9a29-41b5-bfeb-39e8848b095f"). InnerVolumeSpecName "kube-api-access-vc5xm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.680549 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f35c6bb2-9a29-41b5-bfeb-39e8848b095f" (UID: "f35c6bb2-9a29-41b5-bfeb-39e8848b095f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.720505 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70885ea0-025c-45b1-9999-7a44c28312ba" (UID: "70885ea0-025c-45b1-9999-7a44c28312ba"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764063 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764099 5014 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764109 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc5xm\" (UniqueName: \"kubernetes.io/projected/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-kube-api-access-vc5xm\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764119 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764126 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35c6bb2-9a29-41b5-bfeb-39e8848b095f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764135 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764145 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghsq7\" (UniqueName: \"kubernetes.io/projected/716f3dfe-fa59-450b-ba26-31a7a26763a3-kube-api-access-ghsq7\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764156 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrrs9\" (UniqueName: \"kubernetes.io/projected/70885ea0-025c-45b1-9999-7a44c28312ba-kube-api-access-vrrs9\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764165 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70885ea0-025c-45b1-9999-7a44c28312ba-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764174 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v8kt\" (UniqueName: \"kubernetes.io/projected/6d447c3b-5da9-443c-aeff-aa202692a222-kube-api-access-8v8kt\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.764181 5014 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6d447c3b-5da9-443c-aeff-aa202692a222-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.780325 5014 generic.go:334] "Generic (PLEG): container finished" podID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerID="eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142" exitCode=0 Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.780382 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cn8nz" 
event={"ID":"a16883f7-65de-4e01-a7e3-adb349c31ea0","Type":"ContainerDied","Data":"eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.780410 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cn8nz" event={"ID":"a16883f7-65de-4e01-a7e3-adb349c31ea0","Type":"ContainerDied","Data":"bed48e9a59ebb1215b942e6380ec450867450ba66c533a604c24185c477a95f1"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.780426 5014 scope.go:117] "RemoveContainer" containerID="eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.780527 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cn8nz" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.783892 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "716f3dfe-fa59-450b-ba26-31a7a26763a3" (UID: "716f3dfe-fa59-450b-ba26-31a7a26763a3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.783925 5014 generic.go:334] "Generic (PLEG): container finished" podID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerID="baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f" exitCode=0 Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.783996 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pxrjf" event={"ID":"f35c6bb2-9a29-41b5-bfeb-39e8848b095f","Type":"ContainerDied","Data":"baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.784027 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pxrjf" event={"ID":"f35c6bb2-9a29-41b5-bfeb-39e8848b095f","Type":"ContainerDied","Data":"5402c35bed5e183d47b55e55fcb918efbf045181bc779dcd87771f4ac280f9fe"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.784056 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pxrjf" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.786680 5014 generic.go:334] "Generic (PLEG): container finished" podID="6d447c3b-5da9-443c-aeff-aa202692a222" containerID="2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f" exitCode=0 Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.786742 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" event={"ID":"6d447c3b-5da9-443c-aeff-aa202692a222","Type":"ContainerDied","Data":"2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.786764 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" event={"ID":"6d447c3b-5da9-443c-aeff-aa202692a222","Type":"ContainerDied","Data":"af74898c9c4def70ad1b52030bebed5f6c956f31744ab0fec971d468d0e79abd"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.786829 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nphwc" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.800387 5014 scope.go:117] "RemoveContainer" containerID="75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.802823 5014 generic.go:334] "Generic (PLEG): container finished" podID="70885ea0-025c-45b1-9999-7a44c28312ba" containerID="f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6" exitCode=0 Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.802934 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2pv7s" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.802939 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2pv7s" event={"ID":"70885ea0-025c-45b1-9999-7a44c28312ba","Type":"ContainerDied","Data":"f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.803069 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2pv7s" event={"ID":"70885ea0-025c-45b1-9999-7a44c28312ba","Type":"ContainerDied","Data":"fb40ec340edbd6dadb21cd7074353b9fca1d059a17351f875f9ce2e234c2dcfa"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.807142 5014 generic.go:334] "Generic (PLEG): container finished" podID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerID="ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a" exitCode=0 Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.807179 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d46f6" event={"ID":"716f3dfe-fa59-450b-ba26-31a7a26763a3","Type":"ContainerDied","Data":"ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.807344 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d46f6" event={"ID":"716f3dfe-fa59-450b-ba26-31a7a26763a3","Type":"ContainerDied","Data":"4aec3ebb244362c0cafab193b6e9aef3c43297eae7a5ce71790d73bca6b30616"} Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.807418 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d46f6" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.836972 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-blg9z"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.838327 5014 scope.go:117] "RemoveContainer" containerID="c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.855777 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cn8nz"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.861133 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cn8nz"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.866506 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716f3dfe-fa59-450b-ba26-31a7a26763a3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.871324 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pxrjf"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.872177 5014 scope.go:117] "RemoveContainer" containerID="eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142" Dec 05 10:54:14 crc kubenswrapper[5014]: E1205 10:54:14.872591 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142\": container with ID starting with eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142 not found: ID does not exist" containerID="eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.872649 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142"} err="failed to get container status \"eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142\": rpc error: code = NotFound desc = could not find container \"eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142\": container with ID starting with eb7015c880278909098f29908e7d3a9394e92a42064f49b63207ad5663c23142 not found: ID does not exist" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.872678 5014 scope.go:117] "RemoveContainer" containerID="75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289" Dec 05 10:54:14 crc kubenswrapper[5014]: E1205 10:54:14.872952 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289\": container with ID starting with 75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289 not found: ID does not exist" containerID="75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.872977 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289"} err="failed to get container status \"75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289\": rpc error: code = NotFound desc = could not find container 
\"75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289\": container with ID starting with 75e6e58d0be042dfa0e057700b7ad4ce81fa7014956223e7afbf4a4e5cd55289 not found: ID does not exist" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.872994 5014 scope.go:117] "RemoveContainer" containerID="c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f" Dec 05 10:54:14 crc kubenswrapper[5014]: E1205 10:54:14.873229 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f\": container with ID starting with c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f not found: ID does not exist" containerID="c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.873254 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f"} err="failed to get container status \"c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f\": rpc error: code = NotFound desc = could not find container \"c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f\": container with ID starting with c32867d57834670b0a86e0d6c6edde765fbb7082e5d97c25e553bff81cb1473f not found: ID does not exist" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.873278 5014 scope.go:117] "RemoveContainer" containerID="baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.873341 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pxrjf"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.878146 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nphwc"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.884217 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nphwc"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.891970 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d46f6"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.892347 5014 scope.go:117] "RemoveContainer" containerID="b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.899714 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-d46f6"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.902812 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2pv7s"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.907753 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2pv7s"] Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.909307 5014 scope.go:117] "RemoveContainer" containerID="027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.922523 5014 scope.go:117] "RemoveContainer" containerID="baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f" Dec 05 10:54:14 crc kubenswrapper[5014]: E1205 10:54:14.923108 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f\": container with ID starting with baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f not found: ID does not exist" containerID="baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.923142 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f"} err="failed to get container status \"baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f\": rpc error: code = NotFound desc = could not find container \"baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f\": container with ID starting with baf5dfafa6028404b6dc323e7f689b23480bbee508fdcaed2840877b8785f75f not found: ID does not exist" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.923166 5014 scope.go:117] "RemoveContainer" containerID="b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800" Dec 05 10:54:14 crc kubenswrapper[5014]: E1205 10:54:14.923367 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800\": container with ID starting with b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800 not found: ID does not exist" containerID="b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.923392 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800"} err="failed to get container status \"b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800\": rpc error: code = NotFound desc = could not find container \"b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800\": container with ID starting with b6960e1c708233f193c4b5a537737ab0f5a12616035fdfbc8a101386ec2aa800 not found: ID does not exist" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.923408 5014 scope.go:117] "RemoveContainer" containerID="027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6" Dec 05 10:54:14 crc kubenswrapper[5014]: E1205 10:54:14.923637 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6\": container with ID starting with 027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6 not found: ID does not exist" containerID="027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.923659 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6"} err="failed to get container status \"027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6\": rpc error: code = NotFound desc = could not find container \"027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6\": container with ID starting with 027aa0477b6e323214a76779d890e683b4b9ec7a755faf02f3d14f74895cd9c6 not found: ID does not exist" Dec 05 10:54:14 crc kubenswrapper[5014]: I1205 10:54:14.923672 5014 scope.go:117] "RemoveContainer" containerID="2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f" Dec 05 10:54:14 crc 
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.020305 5014 scope.go:117] "RemoveContainer" containerID="2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f"
Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.020756 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f\": container with ID starting with 2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f not found: ID does not exist" containerID="2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.020807 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f"} err="failed to get container status \"2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f\": rpc error: code = NotFound desc = could not find container \"2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f\": container with ID starting with 2de2cc5313ae8410a764eea15a95c6adea2a9ab37f2ca465bd5dff7fd878583f not found: ID does not exist"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.020840 5014 scope.go:117] "RemoveContainer" containerID="be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826"
Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.021257 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826\": container with ID starting with be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826 not found: ID does not exist" containerID="be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.021362 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826"} err="failed to get container status \"be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826\": rpc error: code = NotFound desc = could not find container \"be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826\": container with ID starting with be8faa733841cfe89283f0bbf094c64f354698c927c5eb4e67950212cbeb0826 not found: ID does not exist"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.021415 5014 scope.go:117] "RemoveContainer" containerID="f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.035283 5014 scope.go:117] "RemoveContainer" containerID="5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.049859 5014 scope.go:117] "RemoveContainer" containerID="3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.067416 5014 scope.go:117] "RemoveContainer" containerID="f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6"
Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.068191 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6\": container with ID starting with f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6 not found: ID does not exist" containerID="f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.068253 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6"} err="failed to get container status \"f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6\": rpc error: code = NotFound desc = could not find container \"f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6\": container with ID starting with f68df218f0e9614d1bdd17540bf16f95528969f119182715de0003280d5616f6 not found: ID does not exist"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.068311 5014 scope.go:117] "RemoveContainer" containerID="5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6"
Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.068845 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6\": container with ID starting with 5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6 not found: ID does not exist" containerID="5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.068910 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6"} err="failed to get container status \"5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6\": rpc error: code = NotFound desc = could not find container \"5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6\": container with ID starting with 5c33a777ce8538ad9ecdfd6c2cf2120713f5c23b827df4c2374d2ea591077ef6 not found: ID does not exist"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.068953 5014 scope.go:117] "RemoveContainer" containerID="3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923"
Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.069297 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923\": container with ID starting with 3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923 not found: ID does not exist" containerID="3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.069339 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923"} err="failed to get container status \"3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923\": rpc error: code = NotFound desc = could not find container \"3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923\": container with ID starting with 3df05eb124ce264c7f95a6be23dca0de6c755eb8a5f27a17263f3fbf0d96f923 not found: ID does not exist"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.069362 5014 scope.go:117] "RemoveContainer" containerID="ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.087393 5014 scope.go:117] "RemoveContainer" containerID="005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.103222 5014 scope.go:117] "RemoveContainer" containerID="0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.125816 5014 scope.go:117] "RemoveContainer" containerID="ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a"
Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.126341 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a\": container with ID starting with ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a not found: ID does not exist" containerID="ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.126372 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a"} err="failed to get container status \"ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a\": rpc error: code = NotFound desc = could not find container \"ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a\": container with ID starting with ac7848f16dbb030e5ae783f2be3471d65d1fc55d69f9da0271cd45e73adc974a not found: ID does not exist"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.126393 5014 scope.go:117] "RemoveContainer" containerID="005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de"
Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.127333 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de\": container with ID starting with 005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de not found: ID does not exist" containerID="005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.127383 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de"} err="failed to get container status \"005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de\": rpc error: code = NotFound desc = could not find container \"005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de\": container with ID starting with 005f70e9a35cc98b979504282f693c276aef68e3d0547d7b6684844e3a9116de not found: ID does not exist"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.127414 5014 scope.go:117] "RemoveContainer" containerID="0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f"
Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.127698 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f\": container with ID starting with 0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f not found: ID does not exist" containerID="0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.127733 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f"} err="failed to get container status \"0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f\": rpc error: code = NotFound desc = could not find container \"0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f\": container with ID starting with 0f4279c2bf88b09bed4d43c5c2c00c234bc7b84113bec398ab68e42365217f7f not found: ID does not exist"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.325772 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" path="/var/lib/kubelet/pods/6d447c3b-5da9-443c-aeff-aa202692a222/volumes"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.326389 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" path="/var/lib/kubelet/pods/70885ea0-025c-45b1-9999-7a44c28312ba/volumes"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.327009 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" path="/var/lib/kubelet/pods/716f3dfe-fa59-450b-ba26-31a7a26763a3/volumes"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.327647 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" path="/var/lib/kubelet/pods/a16883f7-65de-4e01-a7e3-adb349c31ea0/volumes"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.328261 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" path="/var/lib/kubelet/pods/f35c6bb2-9a29-41b5-bfeb-39e8848b095f/volumes"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.824971 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" event={"ID":"6324df91-5676-4d76-969c-ed24a6f6d7bf","Type":"ContainerStarted","Data":"e39c3a8878155d70c95985423ecb6976098c48f6e29883b6a4a573a03ff40533"}
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.825040 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" event={"ID":"6324df91-5676-4d76-969c-ed24a6f6d7bf","Type":"ContainerStarted","Data":"ac6662fefb3311e003f1e42b212c43f0f4e0dea4b3a0a13180e109bde3bfcc48"}
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.825456 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-blg9z"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.833486 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-blg9z"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.850077 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-blg9z" podStartSLOduration=2.85005604 podStartE2EDuration="2.85005604s" podCreationTimestamp="2025-12-05 10:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 10:54:15.848853921 +0000 UTC m=+382.796971665" watchObservedRunningTime="2025-12-05 10:54:15.85005604 +0000 UTC m=+382.798173754"
Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.990753 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n2mqd"]
pods=["openshift-marketplace/redhat-marketplace-n2mqd"] Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.990942 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerName="extract-utilities" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.990953 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerName="extract-utilities" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.990962 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.990968 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.990976 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerName="extract-utilities" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.990982 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerName="extract-utilities" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.990994 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerName="extract-content" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991000 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerName="extract-content" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991008 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991014 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991021 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991027 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991037 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" containerName="extract-content" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991042 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" containerName="extract-content" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991051 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" containerName="extract-utilities" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991057 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" containerName="extract-utilities" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991065 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerName="extract-utilities" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991073 5014 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerName="extract-utilities" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991080 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991086 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991095 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991101 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991108 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerName="extract-content" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991113 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerName="extract-content" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991120 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerName="extract-content" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991125 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerName="extract-content" Dec 05 10:54:15 crc kubenswrapper[5014]: E1205 10:54:15.991131 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991137 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991215 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35c6bb2-9a29-41b5-bfeb-39e8848b095f" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991226 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991233 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="716f3dfe-fa59-450b-ba26-31a7a26763a3" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991240 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="70885ea0-025c-45b1-9999-7a44c28312ba" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991252 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d447c3b-5da9-443c-aeff-aa202692a222" containerName="marketplace-operator" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.991260 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="a16883f7-65de-4e01-a7e3-adb349c31ea0" containerName="registry-server" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.992031 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:15 crc kubenswrapper[5014]: I1205 10:54:15.997231 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.000596 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2mqd"] Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.080547 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-catalog-content\") pod \"redhat-marketplace-n2mqd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.080598 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnmtt\" (UniqueName: \"kubernetes.io/projected/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-kube-api-access-wnmtt\") pod \"redhat-marketplace-n2mqd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.080651 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-utilities\") pod \"redhat-marketplace-n2mqd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.183061 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-catalog-content\") pod \"redhat-marketplace-n2mqd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.183108 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnmtt\" (UniqueName: \"kubernetes.io/projected/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-kube-api-access-wnmtt\") pod \"redhat-marketplace-n2mqd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.183149 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-utilities\") pod \"redhat-marketplace-n2mqd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.183606 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-utilities\") pod \"redhat-marketplace-n2mqd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.183916 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-catalog-content\") pod \"redhat-marketplace-n2mqd\" (UID: 
\"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.189537 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-98n4c"] Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.190806 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.193091 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.205110 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnmtt\" (UniqueName: \"kubernetes.io/projected/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-kube-api-access-wnmtt\") pod \"redhat-marketplace-n2mqd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.206671 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-98n4c"] Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.284349 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b896048-86fc-4051-8a7d-5289365af88d-utilities\") pod \"certified-operators-98n4c\" (UID: \"5b896048-86fc-4051-8a7d-5289365af88d\") " pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.284396 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6p25\" (UniqueName: \"kubernetes.io/projected/5b896048-86fc-4051-8a7d-5289365af88d-kube-api-access-m6p25\") pod \"certified-operators-98n4c\" (UID: \"5b896048-86fc-4051-8a7d-5289365af88d\") " pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.284460 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b896048-86fc-4051-8a7d-5289365af88d-catalog-content\") pod \"certified-operators-98n4c\" (UID: \"5b896048-86fc-4051-8a7d-5289365af88d\") " pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.325479 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.387098 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b896048-86fc-4051-8a7d-5289365af88d-utilities\") pod \"certified-operators-98n4c\" (UID: \"5b896048-86fc-4051-8a7d-5289365af88d\") " pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.387149 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6p25\" (UniqueName: \"kubernetes.io/projected/5b896048-86fc-4051-8a7d-5289365af88d-kube-api-access-m6p25\") pod \"certified-operators-98n4c\" (UID: \"5b896048-86fc-4051-8a7d-5289365af88d\") " pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.387215 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b896048-86fc-4051-8a7d-5289365af88d-catalog-content\") pod \"certified-operators-98n4c\" (UID: \"5b896048-86fc-4051-8a7d-5289365af88d\") " pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.387681 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b896048-86fc-4051-8a7d-5289365af88d-catalog-content\") pod \"certified-operators-98n4c\" (UID: \"5b896048-86fc-4051-8a7d-5289365af88d\") " pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.387937 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b896048-86fc-4051-8a7d-5289365af88d-utilities\") pod \"certified-operators-98n4c\" (UID: \"5b896048-86fc-4051-8a7d-5289365af88d\") " pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.419332 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6p25\" (UniqueName: \"kubernetes.io/projected/5b896048-86fc-4051-8a7d-5289365af88d-kube-api-access-m6p25\") pod \"certified-operators-98n4c\" (UID: \"5b896048-86fc-4051-8a7d-5289365af88d\") " pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.504705 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-98n4c" Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.742526 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2mqd"] Dec 05 10:54:16 crc kubenswrapper[5014]: W1205 10:54:16.743599 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b4aa1ae_2e33_4ac0_b782_6f24111bc9dd.slice/crio-1988e72f09613b4abf8b485951fa38a1987ed0c82620bdf275d03e9972707425 WatchSource:0}: Error finding container 1988e72f09613b4abf8b485951fa38a1987ed0c82620bdf275d03e9972707425: Status 404 returned error can't find the container with id 1988e72f09613b4abf8b485951fa38a1987ed0c82620bdf275d03e9972707425 Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.844337 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2mqd" event={"ID":"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd","Type":"ContainerStarted","Data":"1988e72f09613b4abf8b485951fa38a1987ed0c82620bdf275d03e9972707425"} Dec 05 10:54:16 crc kubenswrapper[5014]: I1205 10:54:16.913763 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-98n4c"] Dec 05 10:54:16 crc kubenswrapper[5014]: W1205 10:54:16.973174 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b896048_86fc_4051_8a7d_5289365af88d.slice/crio-c6be18318f0ad4a5bc69d1d2265ffe2b858127b2948702b7f8d98ac85c6db8c3 WatchSource:0}: Error finding container c6be18318f0ad4a5bc69d1d2265ffe2b858127b2948702b7f8d98ac85c6db8c3: Status 404 returned error can't find the container with id c6be18318f0ad4a5bc69d1d2265ffe2b858127b2948702b7f8d98ac85c6db8c3 Dec 05 10:54:17 crc kubenswrapper[5014]: I1205 10:54:17.851872 5014 generic.go:334] "Generic (PLEG): container finished" podID="5b896048-86fc-4051-8a7d-5289365af88d" containerID="bda0d4de8c03563b958a3bbfac9abf7cedfbf88ce3c31479294aeb42ccc72b5c" exitCode=0 Dec 05 10:54:17 crc kubenswrapper[5014]: I1205 10:54:17.851926 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-98n4c" event={"ID":"5b896048-86fc-4051-8a7d-5289365af88d","Type":"ContainerDied","Data":"bda0d4de8c03563b958a3bbfac9abf7cedfbf88ce3c31479294aeb42ccc72b5c"} Dec 05 10:54:17 crc kubenswrapper[5014]: I1205 10:54:17.852334 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-98n4c" event={"ID":"5b896048-86fc-4051-8a7d-5289365af88d","Type":"ContainerStarted","Data":"c6be18318f0ad4a5bc69d1d2265ffe2b858127b2948702b7f8d98ac85c6db8c3"} Dec 05 10:54:17 crc kubenswrapper[5014]: I1205 10:54:17.855075 5014 generic.go:334] "Generic (PLEG): container finished" podID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerID="dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6" exitCode=0 Dec 05 10:54:17 crc kubenswrapper[5014]: I1205 10:54:17.855159 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2mqd" event={"ID":"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd","Type":"ContainerDied","Data":"dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6"} Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.397074 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r2n62"] Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.399540 5014 util.go:30] "No 
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.402028 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.407437 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r2n62"]
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.541674 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6gjf\" (UniqueName: \"kubernetes.io/projected/39b91742-73f1-4264-842e-3429afa2bbc6-kube-api-access-t6gjf\") pod \"redhat-operators-r2n62\" (UID: \"39b91742-73f1-4264-842e-3429afa2bbc6\") " pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.542027 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39b91742-73f1-4264-842e-3429afa2bbc6-catalog-content\") pod \"redhat-operators-r2n62\" (UID: \"39b91742-73f1-4264-842e-3429afa2bbc6\") " pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.542092 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39b91742-73f1-4264-842e-3429afa2bbc6-utilities\") pod \"redhat-operators-r2n62\" (UID: \"39b91742-73f1-4264-842e-3429afa2bbc6\") " pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.595939 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-28kj9"]
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.597317 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-28kj9"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.600961 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.608009 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-28kj9"]
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.643311 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6gjf\" (UniqueName: \"kubernetes.io/projected/39b91742-73f1-4264-842e-3429afa2bbc6-kube-api-access-t6gjf\") pod \"redhat-operators-r2n62\" (UID: \"39b91742-73f1-4264-842e-3429afa2bbc6\") " pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.643357 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39b91742-73f1-4264-842e-3429afa2bbc6-catalog-content\") pod \"redhat-operators-r2n62\" (UID: \"39b91742-73f1-4264-842e-3429afa2bbc6\") " pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.643427 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39b91742-73f1-4264-842e-3429afa2bbc6-utilities\") pod \"redhat-operators-r2n62\" (UID: \"39b91742-73f1-4264-842e-3429afa2bbc6\") " pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.643879 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39b91742-73f1-4264-842e-3429afa2bbc6-utilities\") pod \"redhat-operators-r2n62\" (UID: \"39b91742-73f1-4264-842e-3429afa2bbc6\") " pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.643922 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39b91742-73f1-4264-842e-3429afa2bbc6-catalog-content\") pod \"redhat-operators-r2n62\" (UID: \"39b91742-73f1-4264-842e-3429afa2bbc6\") " pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.666339 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6gjf\" (UniqueName: \"kubernetes.io/projected/39b91742-73f1-4264-842e-3429afa2bbc6-kube-api-access-t6gjf\") pod \"redhat-operators-r2n62\" (UID: \"39b91742-73f1-4264-842e-3429afa2bbc6\") " pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.744456 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/142ed841-421a-47b3-ad45-bc061c5f8e26-catalog-content\") pod \"community-operators-28kj9\" (UID: \"142ed841-421a-47b3-ad45-bc061c5f8e26\") " pod="openshift-marketplace/community-operators-28kj9"
Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.744792 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/142ed841-421a-47b3-ad45-bc061c5f8e26-utilities\") pod \"community-operators-28kj9\" (UID: \"142ed841-421a-47b3-ad45-bc061c5f8e26\") " pod="openshift-marketplace/community-operators-28kj9"
pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.744954 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd9nz\" (UniqueName: \"kubernetes.io/projected/142ed841-421a-47b3-ad45-bc061c5f8e26-kube-api-access-cd9nz\") pod \"community-operators-28kj9\" (UID: \"142ed841-421a-47b3-ad45-bc061c5f8e26\") " pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.759211 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r2n62" Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.846249 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/142ed841-421a-47b3-ad45-bc061c5f8e26-catalog-content\") pod \"community-operators-28kj9\" (UID: \"142ed841-421a-47b3-ad45-bc061c5f8e26\") " pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.846368 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/142ed841-421a-47b3-ad45-bc061c5f8e26-utilities\") pod \"community-operators-28kj9\" (UID: \"142ed841-421a-47b3-ad45-bc061c5f8e26\") " pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.846412 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd9nz\" (UniqueName: \"kubernetes.io/projected/142ed841-421a-47b3-ad45-bc061c5f8e26-kube-api-access-cd9nz\") pod \"community-operators-28kj9\" (UID: \"142ed841-421a-47b3-ad45-bc061c5f8e26\") " pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.847132 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/142ed841-421a-47b3-ad45-bc061c5f8e26-utilities\") pod \"community-operators-28kj9\" (UID: \"142ed841-421a-47b3-ad45-bc061c5f8e26\") " pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.847188 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/142ed841-421a-47b3-ad45-bc061c5f8e26-catalog-content\") pod \"community-operators-28kj9\" (UID: \"142ed841-421a-47b3-ad45-bc061c5f8e26\") " pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.863219 5014 generic.go:334] "Generic (PLEG): container finished" podID="5b896048-86fc-4051-8a7d-5289365af88d" containerID="4b045039da3868f6e636bd25c062823fbcdd98e1079f6f75662f44dd8b5992a2" exitCode=0 Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.863370 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-98n4c" event={"ID":"5b896048-86fc-4051-8a7d-5289365af88d","Type":"ContainerDied","Data":"4b045039da3868f6e636bd25c062823fbcdd98e1079f6f75662f44dd8b5992a2"} Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.868158 5014 generic.go:334] "Generic (PLEG): container finished" podID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerID="f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938" exitCode=0 Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.868202 5014 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2mqd" event={"ID":"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd","Type":"ContainerDied","Data":"f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938"} Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.878924 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd9nz\" (UniqueName: \"kubernetes.io/projected/142ed841-421a-47b3-ad45-bc061c5f8e26-kube-api-access-cd9nz\") pod \"community-operators-28kj9\" (UID: \"142ed841-421a-47b3-ad45-bc061c5f8e26\") " pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:18 crc kubenswrapper[5014]: I1205 10:54:18.936132 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.201380 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r2n62"] Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.338091 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-28kj9"] Dec 05 10:54:19 crc kubenswrapper[5014]: W1205 10:54:19.346420 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod142ed841_421a_47b3_ad45_bc061c5f8e26.slice/crio-34b18710e895f7f6e3fe0da8d2d6974ad178001a1e3ba5ce6de724bd468d44cf WatchSource:0}: Error finding container 34b18710e895f7f6e3fe0da8d2d6974ad178001a1e3ba5ce6de724bd468d44cf: Status 404 returned error can't find the container with id 34b18710e895f7f6e3fe0da8d2d6974ad178001a1e3ba5ce6de724bd468d44cf Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.875422 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-98n4c" event={"ID":"5b896048-86fc-4051-8a7d-5289365af88d","Type":"ContainerStarted","Data":"e4816d0d45db02a389e5525f5caf5b06ad818c6f8c4236823d46e20fb8bd53f5"} Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.880856 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2mqd" event={"ID":"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd","Type":"ContainerStarted","Data":"5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d"} Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.883052 5014 generic.go:334] "Generic (PLEG): container finished" podID="39b91742-73f1-4264-842e-3429afa2bbc6" containerID="cf9972fb3af944ba2ccb3237dbd19f9f75ddd966f59ab84f50105a5db40ec3cf" exitCode=0 Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.883328 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r2n62" event={"ID":"39b91742-73f1-4264-842e-3429afa2bbc6","Type":"ContainerDied","Data":"cf9972fb3af944ba2ccb3237dbd19f9f75ddd966f59ab84f50105a5db40ec3cf"} Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.883379 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r2n62" event={"ID":"39b91742-73f1-4264-842e-3429afa2bbc6","Type":"ContainerStarted","Data":"43dd3468ed1ee63e1e4c8b7e74fe01c13df6657af3af04064b7501fe2435f220"} Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.886258 5014 generic.go:334] "Generic (PLEG): container finished" podID="142ed841-421a-47b3-ad45-bc061c5f8e26" containerID="9e318e26bca2098a691a68b2657911cd6e397065a0de5cdfbc18e50bac64aed3" exitCode=0 Dec 05 10:54:19 crc kubenswrapper[5014]: 
Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.886363 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28kj9" event={"ID":"142ed841-421a-47b3-ad45-bc061c5f8e26","Type":"ContainerStarted","Data":"34b18710e895f7f6e3fe0da8d2d6974ad178001a1e3ba5ce6de724bd468d44cf"}
Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.923717 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-98n4c" podStartSLOduration=2.487103794 podStartE2EDuration="3.923692807s" podCreationTimestamp="2025-12-05 10:54:16 +0000 UTC" firstStartedPulling="2025-12-05 10:54:17.856309571 +0000 UTC m=+384.804427285" lastFinishedPulling="2025-12-05 10:54:19.292898574 +0000 UTC m=+386.241016298" observedRunningTime="2025-12-05 10:54:19.900853676 +0000 UTC m=+386.848971390" watchObservedRunningTime="2025-12-05 10:54:19.923692807 +0000 UTC m=+386.871810521"
Dec 05 10:54:19 crc kubenswrapper[5014]: I1205 10:54:19.973365 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n2mqd" podStartSLOduration=3.566409449 podStartE2EDuration="4.973334916s" podCreationTimestamp="2025-12-05 10:54:15 +0000 UTC" firstStartedPulling="2025-12-05 10:54:17.858568465 +0000 UTC m=+384.806686209" lastFinishedPulling="2025-12-05 10:54:19.265493972 +0000 UTC m=+386.213611676" observedRunningTime="2025-12-05 10:54:19.965741462 +0000 UTC m=+386.913859186" watchObservedRunningTime="2025-12-05 10:54:19.973334916 +0000 UTC m=+386.921452620"
Dec 05 10:54:20 crc kubenswrapper[5014]: I1205 10:54:20.895720 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28kj9" event={"ID":"142ed841-421a-47b3-ad45-bc061c5f8e26","Type":"ContainerStarted","Data":"4bdf6f1dabb1f9c328a2560748b6d6d795da65330106cf1cede4e733f9f0e1c5"}
Dec 05 10:54:21 crc kubenswrapper[5014]: I1205 10:54:21.903435 5014 generic.go:334] "Generic (PLEG): container finished" podID="142ed841-421a-47b3-ad45-bc061c5f8e26" containerID="4bdf6f1dabb1f9c328a2560748b6d6d795da65330106cf1cede4e733f9f0e1c5" exitCode=0
Dec 05 10:54:21 crc kubenswrapper[5014]: I1205 10:54:21.903706 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28kj9" event={"ID":"142ed841-421a-47b3-ad45-bc061c5f8e26","Type":"ContainerDied","Data":"4bdf6f1dabb1f9c328a2560748b6d6d795da65330106cf1cede4e733f9f0e1c5"}
Dec 05 10:54:21 crc kubenswrapper[5014]: I1205 10:54:21.912458 5014 generic.go:334] "Generic (PLEG): container finished" podID="39b91742-73f1-4264-842e-3429afa2bbc6" containerID="370273aa90bb47d21a2ce03834dde4140b9de5cb19fde69461e944614945cb82" exitCode=0
Dec 05 10:54:21 crc kubenswrapper[5014]: I1205 10:54:21.912547 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r2n62" event={"ID":"39b91742-73f1-4264-842e-3429afa2bbc6","Type":"ContainerDied","Data":"370273aa90bb47d21a2ce03834dde4140b9de5cb19fde69461e944614945cb82"}
Dec 05 10:54:22 crc kubenswrapper[5014]: I1205 10:54:22.919480 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-28kj9" event={"ID":"142ed841-421a-47b3-ad45-bc061c5f8e26","Type":"ContainerStarted","Data":"d899017cbe59247dfb83edb339e845bad851f02f00f93f7fc93b32d0985f8d20"}
Dec 05 10:54:22 crc kubenswrapper[5014]: I1205 10:54:22.922495 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r2n62" event={"ID":"39b91742-73f1-4264-842e-3429afa2bbc6","Type":"ContainerStarted","Data":"6d7ebdae2a387d70698ee79b815b801e1ad872f99e1fdb35b07085ed71742472"}
Dec 05 10:54:22 crc kubenswrapper[5014]: I1205 10:54:22.942822 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-28kj9" podStartSLOduration=2.540755798 podStartE2EDuration="4.942798427s" podCreationTimestamp="2025-12-05 10:54:18 +0000 UTC" firstStartedPulling="2025-12-05 10:54:19.888083417 +0000 UTC m=+386.836201121" lastFinishedPulling="2025-12-05 10:54:22.290126046 +0000 UTC m=+389.238243750" observedRunningTime="2025-12-05 10:54:22.939167 +0000 UTC m=+389.887284704" watchObservedRunningTime="2025-12-05 10:54:22.942798427 +0000 UTC m=+389.890916141"
Dec 05 10:54:22 crc kubenswrapper[5014]: I1205 10:54:22.961792 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r2n62" podStartSLOduration=2.476123779 podStartE2EDuration="4.961771216s" podCreationTimestamp="2025-12-05 10:54:18 +0000 UTC" firstStartedPulling="2025-12-05 10:54:19.88445733 +0000 UTC m=+386.832575034" lastFinishedPulling="2025-12-05 10:54:22.370104767 +0000 UTC m=+389.318222471" observedRunningTime="2025-12-05 10:54:22.956282743 +0000 UTC m=+389.904400457" watchObservedRunningTime="2025-12-05 10:54:22.961771216 +0000 UTC m=+389.909888910"
Dec 05 10:54:26 crc kubenswrapper[5014]: I1205 10:54:26.326345 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n2mqd"
Dec 05 10:54:26 crc kubenswrapper[5014]: I1205 10:54:26.326848 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n2mqd"
Dec 05 10:54:26 crc kubenswrapper[5014]: I1205 10:54:26.393337 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n2mqd"
Dec 05 10:54:26 crc kubenswrapper[5014]: I1205 10:54:26.505587 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-98n4c"
Dec 05 10:54:26 crc kubenswrapper[5014]: I1205 10:54:26.505656 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-98n4c"
Dec 05 10:54:26 crc kubenswrapper[5014]: I1205 10:54:26.542458 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-98n4c"
Dec 05 10:54:26 crc kubenswrapper[5014]: I1205 10:54:26.985615 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-98n4c"
Dec 05 10:54:26 crc kubenswrapper[5014]: I1205 10:54:26.987985 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n2mqd"
Dec 05 10:54:28 crc kubenswrapper[5014]: I1205 10:54:28.760428 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r2n62"
Dec 05 10:54:28 crc kubenswrapper[5014]: I1205 10:54:28.761012 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r2n62"
status="unhealthy" pod="openshift-marketplace/redhat-operators-r2n62" Dec 05 10:54:28 crc kubenswrapper[5014]: I1205 10:54:28.818154 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r2n62" Dec 05 10:54:28 crc kubenswrapper[5014]: I1205 10:54:28.937700 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:28 crc kubenswrapper[5014]: I1205 10:54:28.937750 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:28 crc kubenswrapper[5014]: I1205 10:54:28.984043 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:29 crc kubenswrapper[5014]: I1205 10:54:29.011090 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r2n62" Dec 05 10:54:29 crc kubenswrapper[5014]: I1205 10:54:29.028524 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-28kj9" Dec 05 10:54:30 crc kubenswrapper[5014]: I1205 10:54:30.732495 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" podUID="425046d9-b7c9-4b15-be69-2b2ab11aad8f" containerName="registry" containerID="cri-o://5fedf437f267dad37f0a2bf0c2a89ea445a55e778158d9170ab9d43374793d41" gracePeriod=30 Dec 05 10:54:31 crc kubenswrapper[5014]: I1205 10:54:31.972630 5014 generic.go:334] "Generic (PLEG): container finished" podID="425046d9-b7c9-4b15-be69-2b2ab11aad8f" containerID="5fedf437f267dad37f0a2bf0c2a89ea445a55e778158d9170ab9d43374793d41" exitCode=0 Dec 05 10:54:31 crc kubenswrapper[5014]: I1205 10:54:31.972722 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" event={"ID":"425046d9-b7c9-4b15-be69-2b2ab11aad8f","Type":"ContainerDied","Data":"5fedf437f267dad37f0a2bf0c2a89ea445a55e778158d9170ab9d43374793d41"} Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.761224 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.793601 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/425046d9-b7c9-4b15-be69-2b2ab11aad8f-ca-trust-extracted\") pod \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.793670 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-bound-sa-token\") pod \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.793878 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.793930 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkr6g\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-kube-api-access-mkr6g\") pod \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.793952 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-certificates\") pod \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.793966 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-tls\") pod \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.794026 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-trusted-ca\") pod \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.794072 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/425046d9-b7c9-4b15-be69-2b2ab11aad8f-installation-pull-secrets\") pod \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\" (UID: \"425046d9-b7c9-4b15-be69-2b2ab11aad8f\") " Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.794856 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "425046d9-b7c9-4b15-be69-2b2ab11aad8f" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.795009 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "425046d9-b7c9-4b15-be69-2b2ab11aad8f" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.805856 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "425046d9-b7c9-4b15-be69-2b2ab11aad8f" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.808086 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "425046d9-b7c9-4b15-be69-2b2ab11aad8f" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.808678 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-kube-api-access-mkr6g" (OuterVolumeSpecName: "kube-api-access-mkr6g") pod "425046d9-b7c9-4b15-be69-2b2ab11aad8f" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f"). InnerVolumeSpecName "kube-api-access-mkr6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.809974 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/425046d9-b7c9-4b15-be69-2b2ab11aad8f-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "425046d9-b7c9-4b15-be69-2b2ab11aad8f" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.815771 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "425046d9-b7c9-4b15-be69-2b2ab11aad8f" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.826134 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/425046d9-b7c9-4b15-be69-2b2ab11aad8f-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "425046d9-b7c9-4b15-be69-2b2ab11aad8f" (UID: "425046d9-b7c9-4b15-be69-2b2ab11aad8f"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.895957 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkr6g\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-kube-api-access-mkr6g\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.895998 5014 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.896010 5014 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.896022 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/425046d9-b7c9-4b15-be69-2b2ab11aad8f-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.896032 5014 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/425046d9-b7c9-4b15-be69-2b2ab11aad8f-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.896042 5014 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/425046d9-b7c9-4b15-be69-2b2ab11aad8f-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.896051 5014 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/425046d9-b7c9-4b15-be69-2b2ab11aad8f-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.936969 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.937031 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.979724 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" event={"ID":"425046d9-b7c9-4b15-be69-2b2ab11aad8f","Type":"ContainerDied","Data":"8433ee1bc332c0fe977cd50f30b397aa5d24b2a5bc5486615b1f71970bfb6912"} Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.979780 5014 util.go:48] "No ready sandbox for pod can be found. 
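Each of the registry pod's volumes above passes through the same three phases, in order: "operationExecutor.UnmountVolume started" (reconciler_common.go:159), "UnmountVolume.TearDown succeeded" (operation_generator.go:803), then "Volume detached ... DevicePath \"\"" (reconciler_common.go:293). Below is a hypothetical stand-alone tracer, not part of the kubelet, that follows those phases for one pod UID through a log shaped like this one:

```go
package main

// Usage: go run trace.go <pod-uid> < kubelet.log
import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
)

// Matches the volume name after `volume \"...\"` (quotes are backslash-escaped
// inside the kubelet's quoted messages) or after a plain `volume "..."`.
var volRe = regexp.MustCompile(`volume \\?"([^"\\]+)`)

func main() {
	if len(os.Args) != 2 {
		fmt.Fprintln(os.Stderr, "usage: trace <pod-uid> < kubelet.log")
		os.Exit(1)
	}
	uid := os.Args[1]
	phases := []string{
		"UnmountVolume started",
		"UnmountVolume.TearDown succeeded",
		"Volume detached",
	}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1<<20), 1<<20) // journal lines can be very long
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, uid) {
			continue
		}
		for _, phase := range phases {
			if strings.Contains(line, phase) {
				if m := volRe.FindStringSubmatch(line); m != nil {
					fmt.Printf("%-34s %s\n", phase, m[1])
				}
			}
		}
	}
}
```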
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2s2hb" Dec 05 10:54:32 crc kubenswrapper[5014]: I1205 10:54:32.979792 5014 scope.go:117] "RemoveContainer" containerID="5fedf437f267dad37f0a2bf0c2a89ea445a55e778158d9170ab9d43374793d41" Dec 05 10:54:33 crc kubenswrapper[5014]: I1205 10:54:33.005461 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2s2hb"] Dec 05 10:54:33 crc kubenswrapper[5014]: I1205 10:54:33.009075 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2s2hb"] Dec 05 10:54:33 crc kubenswrapper[5014]: I1205 10:54:33.325424 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="425046d9-b7c9-4b15-be69-2b2ab11aad8f" path="/var/lib/kubelet/pods/425046d9-b7c9-4b15-be69-2b2ab11aad8f/volumes" Dec 05 10:55:02 crc kubenswrapper[5014]: I1205 10:55:02.936573 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 10:55:02 crc kubenswrapper[5014]: I1205 10:55:02.937199 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 10:55:02 crc kubenswrapper[5014]: I1205 10:55:02.937256 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:55:02 crc kubenswrapper[5014]: I1205 10:55:02.937947 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"53ecb4867812cd1457c82fb4bd0d6027e193527f1ade465321174a02c10359f9"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 10:55:02 crc kubenswrapper[5014]: I1205 10:55:02.938010 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://53ecb4867812cd1457c82fb4bd0d6027e193527f1ade465321174a02c10359f9" gracePeriod=600 Dec 05 10:55:03 crc kubenswrapper[5014]: I1205 10:55:03.148770 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="53ecb4867812cd1457c82fb4bd0d6027e193527f1ade465321174a02c10359f9" exitCode=0 Dec 05 10:55:03 crc kubenswrapper[5014]: I1205 10:55:03.148822 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"53ecb4867812cd1457c82fb4bd0d6027e193527f1ade465321174a02c10359f9"} Dec 05 10:55:03 crc kubenswrapper[5014]: I1205 10:55:03.149518 5014 scope.go:117] "RemoveContainer" containerID="9174491d963a6da18c9a54ae6ce8c99a8f41e4cc25188f578deed411f33328c9" Dec 05 10:55:04 crc kubenswrapper[5014]: I1205 10:55:04.155584 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"d0d4a87cec920d20a9a11a5c4acd6b4532272ee40a4586c95d621bc0d1e41f59"} Dec 05 10:57:32 crc kubenswrapper[5014]: I1205 10:57:32.936745 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 10:57:32 crc kubenswrapper[5014]: I1205 10:57:32.937418 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 10:58:02 crc kubenswrapper[5014]: I1205 10:58:02.937236 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 10:58:02 crc kubenswrapper[5014]: I1205 10:58:02.937969 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 10:58:32 crc kubenswrapper[5014]: I1205 10:58:32.936666 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 10:58:32 crc kubenswrapper[5014]: I1205 10:58:32.937225 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 10:58:32 crc kubenswrapper[5014]: I1205 10:58:32.937287 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 10:58:32 crc kubenswrapper[5014]: I1205 10:58:32.937830 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d0d4a87cec920d20a9a11a5c4acd6b4532272ee40a4586c95d621bc0d1e41f59"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 10:58:32 crc kubenswrapper[5014]: I1205 10:58:32.937886 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://d0d4a87cec920d20a9a11a5c4acd6b4532272ee40a4586c95d621bc0d1e41f59" gracePeriod=600 Dec 05 10:58:33 crc kubenswrapper[5014]: I1205 
10:58:33.568539 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="d0d4a87cec920d20a9a11a5c4acd6b4532272ee40a4586c95d621bc0d1e41f59" exitCode=0 Dec 05 10:58:33 crc kubenswrapper[5014]: I1205 10:58:33.568587 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"d0d4a87cec920d20a9a11a5c4acd6b4532272ee40a4586c95d621bc0d1e41f59"} Dec 05 10:58:33 crc kubenswrapper[5014]: I1205 10:58:33.568889 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"38dc139c6b157093aa0187abc2a47c8fff469ab971f15976ee0dbc61fa5a9ede"} Dec 05 10:58:33 crc kubenswrapper[5014]: I1205 10:58:33.568913 5014 scope.go:117] "RemoveContainer" containerID="53ecb4867812cd1457c82fb4bd0d6027e193527f1ade465321174a02c10359f9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.173262 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9"] Dec 05 11:00:00 crc kubenswrapper[5014]: E1205 11:00:00.174115 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="425046d9-b7c9-4b15-be69-2b2ab11aad8f" containerName="registry" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.174132 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="425046d9-b7c9-4b15-be69-2b2ab11aad8f" containerName="registry" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.174248 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="425046d9-b7c9-4b15-be69-2b2ab11aad8f" containerName="registry" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.174749 5014 util.go:30] "No sandbox for pod can be found. 
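The liveness probe failing above is an HTTP GET against http://127.0.0.1:8798/health, logged every 30 s; the 10:57:32 / 10:58:02 / 10:58:32 run ends with "failed liveness probe, will be restarted" and a kill with gracePeriod=600. A stdlib sketch of that probe-then-restart pattern follows; failureThreshold=3 is an assumption read off the 30-second spacing, not taken from the pod spec:

```go
package main

import (
	"log"
	"net/http"
	"time"
)

func main() {
	const (
		url              = "http://127.0.0.1:8798/health" // endpoint from the log
		period           = 30 * time.Second               // spacing of the logged failures
		failureThreshold = 3                              // assumption, see lead-in
	)
	client := &http.Client{Timeout: time.Second}
	failures := 0
	for range time.Tick(period) {
		resp, err := client.Get(url)
		if err == nil && resp.StatusCode < 400 { // kubelet counts 2xx/3xx as success
			resp.Body.Close()
			failures = 0
			continue
		}
		if err == nil {
			resp.Body.Close()
		}
		failures++
		log.Printf("Probe failed (%d/%d): %v", failures, failureThreshold, err)
		if failures >= failureThreshold {
			// The kubelet kills the container with the configured grace
			// period and lets it restart; here we only log the decision.
			log.Print("Container failed liveness probe, will be restarted")
			failures = 0
		}
	}
}
```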
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.178233 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.179204 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.188612 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9"] Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.313136 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rftv\" (UniqueName: \"kubernetes.io/projected/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-kube-api-access-6rftv\") pod \"collect-profiles-29415540-s24l9\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.313216 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-secret-volume\") pod \"collect-profiles-29415540-s24l9\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.313246 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-config-volume\") pod \"collect-profiles-29415540-s24l9\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.414050 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rftv\" (UniqueName: \"kubernetes.io/projected/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-kube-api-access-6rftv\") pod \"collect-profiles-29415540-s24l9\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.414111 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-secret-volume\") pod \"collect-profiles-29415540-s24l9\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.414142 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-config-volume\") pod \"collect-profiles-29415540-s24l9\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.415027 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-config-volume\") pod 
\"collect-profiles-29415540-s24l9\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.420460 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-secret-volume\") pod \"collect-profiles-29415540-s24l9\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.431202 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rftv\" (UniqueName: \"kubernetes.io/projected/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-kube-api-access-6rftv\") pod \"collect-profiles-29415540-s24l9\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.497665 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:00 crc kubenswrapper[5014]: I1205 11:00:00.694306 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9"] Dec 05 11:00:01 crc kubenswrapper[5014]: I1205 11:00:01.073457 5014 generic.go:334] "Generic (PLEG): container finished" podID="5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0" containerID="fbadf84fc634890a184599ae160a12e268c3bccbdf0001d964851c2b985f97e8" exitCode=0 Dec 05 11:00:01 crc kubenswrapper[5014]: I1205 11:00:01.073502 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" event={"ID":"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0","Type":"ContainerDied","Data":"fbadf84fc634890a184599ae160a12e268c3bccbdf0001d964851c2b985f97e8"} Dec 05 11:00:01 crc kubenswrapper[5014]: I1205 11:00:01.073529 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" event={"ID":"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0","Type":"ContainerStarted","Data":"7906faf0430953a79c312add64c9f5e163175e110cb77a87825a5e45d6c96d76"} Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.280888 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.443197 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-secret-volume\") pod \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.443247 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-config-volume\") pod \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.443391 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rftv\" (UniqueName: \"kubernetes.io/projected/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-kube-api-access-6rftv\") pod \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\" (UID: \"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0\") " Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.443987 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-config-volume" (OuterVolumeSpecName: "config-volume") pod "5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0" (UID: "5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.448445 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-kube-api-access-6rftv" (OuterVolumeSpecName: "kube-api-access-6rftv") pod "5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0" (UID: "5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0"). InnerVolumeSpecName "kube-api-access-6rftv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.448596 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0" (UID: "5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0"). InnerVolumeSpecName "secret-volume". 
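Every kubelet record in this file shares one shape: a journald prefix ("Dec 05 ... crc kubenswrapper[5014]:"), a klog header (severity letter, MMDD date, wall time, thread id, file:line), a quoted message, then key="value" pairs. A hypothetical parser for that shape, with field names of this sketch's own choosing:

```go
package main

import (
	"fmt"
	"regexp"
)

// journald prefix + klog header + quoted message + trailing key=value pairs.
var lineRe = regexp.MustCompile(
	`^(\w{3} \d{2} \d{2}:\d{2}:\d{2}) (\S+) kubenswrapper\[(\d+)\]: ` +
		`([IWE])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+\d+ ([\w.]+:\d+)\] "([^"]*)"(.*)$`)

func main() {
	line := `Dec 05 11:00:01 crc kubenswrapper[5014]: I1205 11:00:01.073457 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9"`
	m := lineRe.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	// m[1] syslog time, m[2] host, m[3] pid, m[4] severity, m[5] MMDD,
	// m[6] klog time, m[7] file:line, m[8] message, m[9] key=value tail.
	fmt.Printf("severity=%s source=%s msg=%q rest=%s\n", m[4], m[7], m[8], m[9])
}
```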
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.545057 5014 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.545105 5014 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:02 crc kubenswrapper[5014]: I1205 11:00:02.545121 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rftv\" (UniqueName: \"kubernetes.io/projected/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0-kube-api-access-6rftv\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:03 crc kubenswrapper[5014]: I1205 11:00:03.084786 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" event={"ID":"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0","Type":"ContainerDied","Data":"7906faf0430953a79c312add64c9f5e163175e110cb77a87825a5e45d6c96d76"} Dec 05 11:00:03 crc kubenswrapper[5014]: I1205 11:00:03.084848 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7906faf0430953a79c312add64c9f5e163175e110cb77a87825a5e45d6c96d76" Dec 05 11:00:03 crc kubenswrapper[5014]: I1205 11:00:03.084861 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9" Dec 05 11:00:22 crc kubenswrapper[5014]: I1205 11:00:22.780748 5014 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.870072 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-vjh5c"] Dec 05 11:00:28 crc kubenswrapper[5014]: E1205 11:00:28.870701 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0" containerName="collect-profiles" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.870719 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0" containerName="collect-profiles" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.870838 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0" containerName="collect-profiles" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.871447 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-vjh5c" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.875448 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.875638 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.876784 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-kk69q"] Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.877994 5014 util.go:30] "No sandbox for pod can be found. 
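The RemoveStaleState lines above show the CPU and memory managers dropping per-container assignments left behind by the finished collect-profiles pod as soon as a new pod is admitted. A minimal sketch of that cleanup pattern, using illustrative types rather than the kubelet's own:

```go
package main

import "fmt"

// podUID -> containerName -> (placeholder for a CPU/memory assignment).
type staleState map[string]map[string]struct{}

// removeStaleState drops assignments for pods that are no longer active,
// mirroring the cpu_manager / state_mem / memory_manager lines above.
func removeStaleState(st staleState, activePods map[string]bool) {
	for podUID, containers := range st {
		if activePods[podUID] {
			continue
		}
		for name := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
		}
		delete(st, podUID) // deleting during range is safe in Go
	}
}

func main() {
	st := staleState{
		"5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0": {"collect-profiles": {}},
	}
	removeStaleState(st, map[string]bool{}) // no active pods keep it alive
}
```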
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-kk69q" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.883778 5014 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-sdpld" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.883975 5014 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-7wfp8" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.892375 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-vjh5c"] Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.905089 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-kk69q"] Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.920340 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-qhbsj"] Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.921448 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.923050 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-qhbsj"] Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.923465 5014 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-6mr2x" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.970309 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9klrp\" (UniqueName: \"kubernetes.io/projected/08898b10-ad0c-4b34-bc40-49a86e6da919-kube-api-access-9klrp\") pod \"cert-manager-5b446d88c5-kk69q\" (UID: \"08898b10-ad0c-4b34-bc40-49a86e6da919\") " pod="cert-manager/cert-manager-5b446d88c5-kk69q" Dec 05 11:00:28 crc kubenswrapper[5014]: I1205 11:00:28.970385 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szr4n\" (UniqueName: \"kubernetes.io/projected/ef8d817f-5b79-4efc-aec5-cf9f4133b0e2-kube-api-access-szr4n\") pod \"cert-manager-cainjector-7f985d654d-vjh5c\" (UID: \"ef8d817f-5b79-4efc-aec5-cf9f4133b0e2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-vjh5c" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.071973 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szr4n\" (UniqueName: \"kubernetes.io/projected/ef8d817f-5b79-4efc-aec5-cf9f4133b0e2-kube-api-access-szr4n\") pod \"cert-manager-cainjector-7f985d654d-vjh5c\" (UID: \"ef8d817f-5b79-4efc-aec5-cf9f4133b0e2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-vjh5c" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.072101 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgk8d\" (UniqueName: \"kubernetes.io/projected/43ed4c76-e0f2-4016-8e33-ab3498c5268c-kube-api-access-mgk8d\") pod \"cert-manager-webhook-5655c58dd6-qhbsj\" (UID: \"43ed4c76-e0f2-4016-8e33-ab3498c5268c\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.072212 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9klrp\" (UniqueName: \"kubernetes.io/projected/08898b10-ad0c-4b34-bc40-49a86e6da919-kube-api-access-9klrp\") pod 
\"cert-manager-5b446d88c5-kk69q\" (UID: \"08898b10-ad0c-4b34-bc40-49a86e6da919\") " pod="cert-manager/cert-manager-5b446d88c5-kk69q" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.092136 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9klrp\" (UniqueName: \"kubernetes.io/projected/08898b10-ad0c-4b34-bc40-49a86e6da919-kube-api-access-9klrp\") pod \"cert-manager-5b446d88c5-kk69q\" (UID: \"08898b10-ad0c-4b34-bc40-49a86e6da919\") " pod="cert-manager/cert-manager-5b446d88c5-kk69q" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.092192 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szr4n\" (UniqueName: \"kubernetes.io/projected/ef8d817f-5b79-4efc-aec5-cf9f4133b0e2-kube-api-access-szr4n\") pod \"cert-manager-cainjector-7f985d654d-vjh5c\" (UID: \"ef8d817f-5b79-4efc-aec5-cf9f4133b0e2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-vjh5c" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.173236 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgk8d\" (UniqueName: \"kubernetes.io/projected/43ed4c76-e0f2-4016-8e33-ab3498c5268c-kube-api-access-mgk8d\") pod \"cert-manager-webhook-5655c58dd6-qhbsj\" (UID: \"43ed4c76-e0f2-4016-8e33-ab3498c5268c\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.191931 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-vjh5c" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.202294 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-kk69q" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.215100 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgk8d\" (UniqueName: \"kubernetes.io/projected/43ed4c76-e0f2-4016-8e33-ab3498c5268c-kube-api-access-mgk8d\") pod \"cert-manager-webhook-5655c58dd6-qhbsj\" (UID: \"43ed4c76-e0f2-4016-8e33-ab3498c5268c\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.237560 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.481074 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-kk69q"] Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.494929 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.763583 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-qhbsj"] Dec 05 11:00:29 crc kubenswrapper[5014]: W1205 11:00:29.769143 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43ed4c76_e0f2_4016_8e33_ab3498c5268c.slice/crio-579fbc6e6db7fb4740bfc6ac22ab85f10b499b845b30efdddbc70e6043a54e9e WatchSource:0}: Error finding container 579fbc6e6db7fb4740bfc6ac22ab85f10b499b845b30efdddbc70e6043a54e9e: Status 404 returned error can't find the container with id 579fbc6e6db7fb4740bfc6ac22ab85f10b499b845b30efdddbc70e6043a54e9e Dec 05 11:00:29 crc kubenswrapper[5014]: I1205 11:00:29.771040 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-vjh5c"] Dec 05 11:00:29 crc kubenswrapper[5014]: W1205 11:00:29.777659 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef8d817f_5b79_4efc_aec5_cf9f4133b0e2.slice/crio-4cae540f73afa5829cce974540eb766aaf0c8a8251c5ca5ce185acf26d1b51c2 WatchSource:0}: Error finding container 4cae540f73afa5829cce974540eb766aaf0c8a8251c5ca5ce185acf26d1b51c2: Status 404 returned error can't find the container with id 4cae540f73afa5829cce974540eb766aaf0c8a8251c5ca5ce185acf26d1b51c2 Dec 05 11:00:30 crc kubenswrapper[5014]: I1205 11:00:30.240638 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-vjh5c" event={"ID":"ef8d817f-5b79-4efc-aec5-cf9f4133b0e2","Type":"ContainerStarted","Data":"4cae540f73afa5829cce974540eb766aaf0c8a8251c5ca5ce185acf26d1b51c2"} Dec 05 11:00:30 crc kubenswrapper[5014]: I1205 11:00:30.243560 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" event={"ID":"43ed4c76-e0f2-4016-8e33-ab3498c5268c","Type":"ContainerStarted","Data":"579fbc6e6db7fb4740bfc6ac22ab85f10b499b845b30efdddbc70e6043a54e9e"} Dec 05 11:00:30 crc kubenswrapper[5014]: I1205 11:00:30.246440 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-kk69q" event={"ID":"08898b10-ad0c-4b34-bc40-49a86e6da919","Type":"ContainerStarted","Data":"300f0d088f8b02a66aac2dcb661263c2f71667fc7779c3ba8c6552db8273f39f"} Dec 05 11:00:32 crc kubenswrapper[5014]: I1205 11:00:32.259330 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-kk69q" event={"ID":"08898b10-ad0c-4b34-bc40-49a86e6da919","Type":"ContainerStarted","Data":"08f488a4ccaf48c8a2df0ec280e51e4c2b713c4dbd19b1f433a00896eae7bc45"} Dec 05 11:00:32 crc kubenswrapper[5014]: I1205 11:00:32.278009 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-kk69q" podStartSLOduration=2.005122045 podStartE2EDuration="4.277983484s" podCreationTimestamp="2025-12-05 11:00:28 +0000 UTC" firstStartedPulling="2025-12-05 11:00:29.494659726 +0000 UTC m=+756.442777430" 
lastFinishedPulling="2025-12-05 11:00:31.767521165 +0000 UTC m=+758.715638869" observedRunningTime="2025-12-05 11:00:32.273144424 +0000 UTC m=+759.221262219" watchObservedRunningTime="2025-12-05 11:00:32.277983484 +0000 UTC m=+759.226101208" Dec 05 11:00:34 crc kubenswrapper[5014]: I1205 11:00:34.271795 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" event={"ID":"43ed4c76-e0f2-4016-8e33-ab3498c5268c","Type":"ContainerStarted","Data":"b4a8f69d21f81c1a5fd7755629a395caccb98dd4ba6ca307881c6f3824312b4c"} Dec 05 11:00:34 crc kubenswrapper[5014]: I1205 11:00:34.272316 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" Dec 05 11:00:34 crc kubenswrapper[5014]: I1205 11:00:34.273777 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-vjh5c" event={"ID":"ef8d817f-5b79-4efc-aec5-cf9f4133b0e2","Type":"ContainerStarted","Data":"f4114441fb7397fbe42458a4b6bf42b7a1587326c9f3cd816161bfdac5169e09"} Dec 05 11:00:34 crc kubenswrapper[5014]: I1205 11:00:34.288675 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" podStartSLOduration=2.680647201 podStartE2EDuration="6.288651163s" podCreationTimestamp="2025-12-05 11:00:28 +0000 UTC" firstStartedPulling="2025-12-05 11:00:29.771619148 +0000 UTC m=+756.719736852" lastFinishedPulling="2025-12-05 11:00:33.3796231 +0000 UTC m=+760.327740814" observedRunningTime="2025-12-05 11:00:34.283919367 +0000 UTC m=+761.232037071" watchObservedRunningTime="2025-12-05 11:00:34.288651163 +0000 UTC m=+761.236768877" Dec 05 11:00:34 crc kubenswrapper[5014]: I1205 11:00:34.299079 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-vjh5c" podStartSLOduration=2.705777387 podStartE2EDuration="6.299060118s" podCreationTimestamp="2025-12-05 11:00:28 +0000 UTC" firstStartedPulling="2025-12-05 11:00:29.779901141 +0000 UTC m=+756.728018845" lastFinishedPulling="2025-12-05 11:00:33.373183862 +0000 UTC m=+760.321301576" observedRunningTime="2025-12-05 11:00:34.296337291 +0000 UTC m=+761.244454995" watchObservedRunningTime="2025-12-05 11:00:34.299060118 +0000 UTC m=+761.247177822" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.244238 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-qhbsj" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.251758 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-znfbl"] Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.253557 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="northd" containerID="cri-o://77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b" gracePeriod=30 Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.253604 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="sbdb" containerID="cri-o://b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3" gracePeriod=30 Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.253631 5014 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovn-acl-logging" containerID="cri-o://4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00" gracePeriod=30 Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.253623 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kube-rbac-proxy-node" containerID="cri-o://ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f" gracePeriod=30 Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.253609 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623" gracePeriod=30 Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.253711 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="nbdb" containerID="cri-o://36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c" gracePeriod=30 Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.253528 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovn-controller" containerID="cri-o://96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536" gracePeriod=30 Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.296474 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" containerID="cri-o://6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170" gracePeriod=30 Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.576362 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/3.log" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.579199 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovn-acl-logging/0.log" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.579924 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovn-controller/0.log" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.581005 5014 util.go:48] "No ready sandbox for pod can be found. 
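Deleting ovnkube-node-znfbl fans out one "Killing container with a grace period" per container, each with gracePeriod=30: the runtime delivers SIGTERM, waits up to the grace period, then force-kills. A local-process sketch of that sequence (it drives os/exec, not CRI-O):

```go
package main

import (
	"log"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace mirrors the kubelet's graceful-stop sequence: polite signal,
// bounded wait, then a hard kill once the grace period elapses.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	cmd.Process.Signal(syscall.SIGTERM) // analogous to gracePeriod=30 starting
	select {
	case <-done:
		log.Print("process exited within grace period")
	case <-time.After(grace):
		cmd.Process.Kill() // SIGKILL after the grace period
		<-done
		log.Print("process force-killed")
	}
}

func main() {
	cmd := exec.Command("sleep", "300")
	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}
	killWithGrace(cmd, 30*time.Second)
}
```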
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.635914 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-h7vfn"] Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636133 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovn-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636151 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovn-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636161 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="northd" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636168 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="northd" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636183 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636191 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636199 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636204 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636210 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636216 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636224 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636230 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636237 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovn-acl-logging" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636244 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovn-acl-logging" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636254 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636261 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636288 5014 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="sbdb" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636295 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="sbdb" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636304 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kubecfg-setup" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636310 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kubecfg-setup" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636323 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="nbdb" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636330 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="nbdb" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.636345 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kube-rbac-proxy-node" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.636351 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kube-rbac-proxy-node" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638529 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="sbdb" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638544 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638553 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="northd" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638565 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="nbdb" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638576 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638583 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovn-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638591 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638642 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638654 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638663 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovn-acl-logging" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638668 5014 
memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="kube-rbac-proxy-node" Dec 05 11:00:39 crc kubenswrapper[5014]: E1205 11:00:39.638761 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638768 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.638857 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerName="ovnkube-controller" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.640546 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715322 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-log-socket\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715361 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-netd\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715401 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715429 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-node-log\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715419 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-log-socket" (OuterVolumeSpecName: "log-socket") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715487 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovn-node-metrics-cert\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715508 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-slash\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715511 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715536 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715537 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-script-lib\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715582 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-systemd\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715619 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-var-lib-openvswitch\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715649 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-ovn\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715671 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-bin\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715656 
5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-slash" (OuterVolumeSpecName: "host-slash") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715692 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-etc-openvswitch\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715709 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-openvswitch\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715698 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-node-log" (OuterVolumeSpecName: "node-log") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715739 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715748 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715730 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-netns\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715718 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715770 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). 
InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715787 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-kubelet\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715816 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqw9s\" (UniqueName: \"kubernetes.io/projected/41fb1a99-1c51-4281-b73f-8a29357a0a2c-kube-api-access-gqw9s\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715876 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-ovn-kubernetes\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715898 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-config\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715926 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-env-overrides\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715994 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-systemd-units\") pod \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\" (UID: \"41fb1a99-1c51-4281-b73f-8a29357a0a2c\") " Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716235 5014 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716250 5014 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716263 5014 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716299 5014 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716636 5014 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716651 5014 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716666 5014 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716684 5014 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716699 5014 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715791 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715803 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716054 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716408 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716532 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716569 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.715826 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.716787 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.721184 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41fb1a99-1c51-4281-b73f-8a29357a0a2c-kube-api-access-gqw9s" (OuterVolumeSpecName: "kube-api-access-gqw9s") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "kube-api-access-gqw9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.721190 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.731775 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "41fb1a99-1c51-4281-b73f-8a29357a0a2c" (UID: "41fb1a99-1c51-4281-b73f-8a29357a0a2c"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.817926 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhj7c\" (UniqueName: \"kubernetes.io/projected/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-kube-api-access-nhj7c\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.817983 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-etc-openvswitch\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818009 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-slash\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818034 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-run-netns\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818176 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-systemd-units\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818230 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-cni-bin\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818298 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-kubelet\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818348 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-log-socket\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818363 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-cni-netd\") pod 
\"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818380 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-env-overrides\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818398 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-var-lib-openvswitch\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818412 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-node-log\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818430 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818543 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-run-openvswitch\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818589 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-ovnkube-script-lib\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818651 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-ovnkube-config\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818726 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-run-ovn\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818761 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-ovn-node-metrics-cert\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818798 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-run-systemd\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818841 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-run-ovn-kubernetes\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.818979 5014 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.819004 5014 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.819025 5014 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.819043 5014 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.819059 5014 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.819075 5014 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.819092 5014 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.819107 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqw9s\" (UniqueName: \"kubernetes.io/projected/41fb1a99-1c51-4281-b73f-8a29357a0a2c-kube-api-access-gqw9s\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.819125 5014 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/41fb1a99-1c51-4281-b73f-8a29357a0a2c-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc 
kubenswrapper[5014]: I1205 11:00:39.819139 5014 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.819150 5014 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/41fb1a99-1c51-4281-b73f-8a29357a0a2c-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920296 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhj7c\" (UniqueName: \"kubernetes.io/projected/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-kube-api-access-nhj7c\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920343 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-etc-openvswitch\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920359 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-slash\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920375 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-run-netns\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920401 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-systemd-units\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920414 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-cni-bin\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920431 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-kubelet\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920451 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-log-socket\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920464 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-cni-netd\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920478 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-env-overrides\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920476 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-etc-openvswitch\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920498 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-var-lib-openvswitch\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920514 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-node-log\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920531 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920551 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-run-openvswitch\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920558 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-slash\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920568 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-ovnkube-script-lib\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 
11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920584 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-run-netns\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920615 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-systemd-units\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920664 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-node-log\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920609 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-ovnkube-config\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920698 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-var-lib-openvswitch\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920725 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920747 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-run-ovn\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920753 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-run-openvswitch\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920777 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-ovn-node-metrics-cert\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920787 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-run-ovn\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920807 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-run-systemd\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920814 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-cni-netd\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920846 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-kubelet\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920852 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-run-systemd\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920854 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-log-socket\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920907 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-run-ovn-kubernetes\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920888 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-run-ovn-kubernetes\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.920975 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-host-cni-bin\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.921377 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-ovnkube-script-lib\") 
pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.921665 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-env-overrides\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.921774 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-ovnkube-config\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.926574 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-ovn-node-metrics-cert\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.949512 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhj7c\" (UniqueName: \"kubernetes.io/projected/7ca37ce3-a0d9-4d44-b3ce-f803e06e587d-kube-api-access-nhj7c\") pod \"ovnkube-node-h7vfn\" (UID: \"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d\") " pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:39 crc kubenswrapper[5014]: I1205 11:00:39.962485 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.309399 5014 generic.go:334] "Generic (PLEG): container finished" podID="7ca37ce3-a0d9-4d44-b3ce-f803e06e587d" containerID="b5332139304fdfae95bd7d8778bd1a5fc7e6f5f1dd877ad5c75e3461aac4b3ed" exitCode=0 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.309464 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerDied","Data":"b5332139304fdfae95bd7d8778bd1a5fc7e6f5f1dd877ad5c75e3461aac4b3ed"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.309489 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerStarted","Data":"b8785b6d2b45380052860872299239a8c52087cdbf25e42606921ee82ee5491e"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.313595 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovnkube-controller/3.log" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.319697 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovn-acl-logging/0.log" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320156 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-znfbl_41fb1a99-1c51-4281-b73f-8a29357a0a2c/ovn-controller/0.log" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320501 5014 generic.go:334] "Generic (PLEG): container finished" 
podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170" exitCode=0 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320526 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3" exitCode=0 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320533 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c" exitCode=0 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320540 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b" exitCode=0 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320549 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623" exitCode=0 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320555 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f" exitCode=0 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320561 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00" exitCode=143 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320568 5014 generic.go:334] "Generic (PLEG): container finished" podID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" containerID="96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536" exitCode=143 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320604 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320629 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320641 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320650 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320661 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} Dec 05 11:00:40 crc 
kubenswrapper[5014]: I1205 11:00:40.320671 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320680 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320689 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320695 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320700 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320705 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320710 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320714 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320719 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320724 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320730 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320737 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320744 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320748 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320753 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320758 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320763 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320767 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320772 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320777 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320781 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320788 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320795 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320801 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320806 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320811 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320817 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320822 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320827 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320832 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320836 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320841 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320851 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" event={"ID":"41fb1a99-1c51-4281-b73f-8a29357a0a2c","Type":"ContainerDied","Data":"6775976b158719b980925d5d8db944015a6a842e88f312654e326870a8481b0d"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320857 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320863 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320868 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320873 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320878 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320883 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320889 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320894 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320899 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320904 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.320916 5014 scope.go:117] "RemoveContainer" containerID="6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.321036 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-znfbl" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.325880 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/2.log" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.326374 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/1.log" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.326426 5014 generic.go:334] "Generic (PLEG): container finished" podID="f8198e15-3b7a-4c40-b4b3-63382eba5846" containerID="9573c4413ea9c82de910e5cc02c6dfd72517d90499efed04a40ee2df63b5cce8" exitCode=2 Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.326455 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-424mc" event={"ID":"f8198e15-3b7a-4c40-b4b3-63382eba5846","Type":"ContainerDied","Data":"9573c4413ea9c82de910e5cc02c6dfd72517d90499efed04a40ee2df63b5cce8"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.326479 5014 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd"} Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.326805 5014 scope.go:117] "RemoveContainer" containerID="9573c4413ea9c82de910e5cc02c6dfd72517d90499efed04a40ee2df63b5cce8" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.352075 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.383866 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-znfbl"] Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.387888 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-znfbl"] Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.420943 5014 scope.go:117] "RemoveContainer" containerID="b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.453588 5014 scope.go:117] "RemoveContainer" containerID="36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.465512 5014 scope.go:117] "RemoveContainer" containerID="77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.478660 5014 scope.go:117] "RemoveContainer" containerID="63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.491142 5014 scope.go:117] "RemoveContainer" containerID="ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f" Dec 
05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.507670 5014 scope.go:117] "RemoveContainer" containerID="4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.532300 5014 scope.go:117] "RemoveContainer" containerID="96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.558718 5014 scope.go:117] "RemoveContainer" containerID="3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.580220 5014 scope.go:117] "RemoveContainer" containerID="6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170" Dec 05 11:00:40 crc kubenswrapper[5014]: E1205 11:00:40.580712 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": container with ID starting with 6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170 not found: ID does not exist" containerID="6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.580760 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} err="failed to get container status \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": rpc error: code = NotFound desc = could not find container \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": container with ID starting with 6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.580788 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5" Dec 05 11:00:40 crc kubenswrapper[5014]: E1205 11:00:40.581295 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\": container with ID starting with 4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5 not found: ID does not exist" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.581332 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} err="failed to get container status \"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\": rpc error: code = NotFound desc = could not find container \"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\": container with ID starting with 4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.581353 5014 scope.go:117] "RemoveContainer" containerID="b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3" Dec 05 11:00:40 crc kubenswrapper[5014]: E1205 11:00:40.581633 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\": container with ID starting with b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3 not found: 
ID does not exist" containerID="b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.581661 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} err="failed to get container status \"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\": rpc error: code = NotFound desc = could not find container \"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\": container with ID starting with b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.581675 5014 scope.go:117] "RemoveContainer" containerID="36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c" Dec 05 11:00:40 crc kubenswrapper[5014]: E1205 11:00:40.581975 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\": container with ID starting with 36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c not found: ID does not exist" containerID="36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.582005 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} err="failed to get container status \"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\": rpc error: code = NotFound desc = could not find container \"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\": container with ID starting with 36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.582026 5014 scope.go:117] "RemoveContainer" containerID="77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b" Dec 05 11:00:40 crc kubenswrapper[5014]: E1205 11:00:40.582418 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\": container with ID starting with 77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b not found: ID does not exist" containerID="77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.582446 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} err="failed to get container status \"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\": rpc error: code = NotFound desc = could not find container \"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\": container with ID starting with 77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.582463 5014 scope.go:117] "RemoveContainer" containerID="63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623" Dec 05 11:00:40 crc kubenswrapper[5014]: E1205 11:00:40.582805 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\": container with ID starting with 63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623 not found: ID does not exist" containerID="63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.582829 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} err="failed to get container status \"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\": rpc error: code = NotFound desc = could not find container \"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\": container with ID starting with 63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.582842 5014 scope.go:117] "RemoveContainer" containerID="ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f" Dec 05 11:00:40 crc kubenswrapper[5014]: E1205 11:00:40.583066 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\": container with ID starting with ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f not found: ID does not exist" containerID="ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.583097 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} err="failed to get container status \"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\": rpc error: code = NotFound desc = could not find container \"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\": container with ID starting with ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.583118 5014 scope.go:117] "RemoveContainer" containerID="4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00" Dec 05 11:00:40 crc kubenswrapper[5014]: E1205 11:00:40.583443 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\": container with ID starting with 4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00 not found: ID does not exist" containerID="4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.583472 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} err="failed to get container status \"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\": rpc error: code = NotFound desc = could not find container \"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\": container with ID starting with 4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.583491 5014 scope.go:117] "RemoveContainer" containerID="96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536" Dec 05 11:00:40 crc 
kubenswrapper[5014]: E1205 11:00:40.583888 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\": container with ID starting with 96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536 not found: ID does not exist" containerID="96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.583962 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} err="failed to get container status \"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\": rpc error: code = NotFound desc = could not find container \"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\": container with ID starting with 96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.584011 5014 scope.go:117] "RemoveContainer" containerID="3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f" Dec 05 11:00:40 crc kubenswrapper[5014]: E1205 11:00:40.584526 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\": container with ID starting with 3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f not found: ID does not exist" containerID="3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.584558 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f"} err="failed to get container status \"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\": rpc error: code = NotFound desc = could not find container \"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\": container with ID starting with 3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.584576 5014 scope.go:117] "RemoveContainer" containerID="6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.584983 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} err="failed to get container status \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": rpc error: code = NotFound desc = could not find container \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": container with ID starting with 6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.585008 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.585320 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} err="failed to get container status 
\"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\": rpc error: code = NotFound desc = could not find container \"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\": container with ID starting with 4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.585346 5014 scope.go:117] "RemoveContainer" containerID="b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.585672 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} err="failed to get container status \"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\": rpc error: code = NotFound desc = could not find container \"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\": container with ID starting with b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.585710 5014 scope.go:117] "RemoveContainer" containerID="36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.586045 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} err="failed to get container status \"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\": rpc error: code = NotFound desc = could not find container \"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\": container with ID starting with 36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.586071 5014 scope.go:117] "RemoveContainer" containerID="77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.586365 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} err="failed to get container status \"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\": rpc error: code = NotFound desc = could not find container \"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\": container with ID starting with 77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.586403 5014 scope.go:117] "RemoveContainer" containerID="63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.586703 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} err="failed to get container status \"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\": rpc error: code = NotFound desc = could not find container \"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\": container with ID starting with 63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.586725 5014 scope.go:117] "RemoveContainer" 
containerID="ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.586991 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} err="failed to get container status \"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\": rpc error: code = NotFound desc = could not find container \"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\": container with ID starting with ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.587019 5014 scope.go:117] "RemoveContainer" containerID="4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.587248 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} err="failed to get container status \"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\": rpc error: code = NotFound desc = could not find container \"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\": container with ID starting with 4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.587291 5014 scope.go:117] "RemoveContainer" containerID="96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.587582 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} err="failed to get container status \"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\": rpc error: code = NotFound desc = could not find container \"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\": container with ID starting with 96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.587618 5014 scope.go:117] "RemoveContainer" containerID="3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.587906 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f"} err="failed to get container status \"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\": rpc error: code = NotFound desc = could not find container \"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\": container with ID starting with 3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.587937 5014 scope.go:117] "RemoveContainer" containerID="6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.588202 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} err="failed to get container status \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": rpc error: code = NotFound desc = could not find 
container \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": container with ID starting with 6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.588233 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.588483 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} err="failed to get container status \"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\": rpc error: code = NotFound desc = could not find container \"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\": container with ID starting with 4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.588517 5014 scope.go:117] "RemoveContainer" containerID="b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.588801 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} err="failed to get container status \"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\": rpc error: code = NotFound desc = could not find container \"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\": container with ID starting with b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.588836 5014 scope.go:117] "RemoveContainer" containerID="36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.589363 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} err="failed to get container status \"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\": rpc error: code = NotFound desc = could not find container \"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\": container with ID starting with 36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.589411 5014 scope.go:117] "RemoveContainer" containerID="77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.589727 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} err="failed to get container status \"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\": rpc error: code = NotFound desc = could not find container \"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\": container with ID starting with 77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.589754 5014 scope.go:117] "RemoveContainer" containerID="63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.590074 5014 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} err="failed to get container status \"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\": rpc error: code = NotFound desc = could not find container \"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\": container with ID starting with 63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.590110 5014 scope.go:117] "RemoveContainer" containerID="ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.590441 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} err="failed to get container status \"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\": rpc error: code = NotFound desc = could not find container \"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\": container with ID starting with ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.590464 5014 scope.go:117] "RemoveContainer" containerID="4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.590712 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} err="failed to get container status \"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\": rpc error: code = NotFound desc = could not find container \"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\": container with ID starting with 4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.590762 5014 scope.go:117] "RemoveContainer" containerID="96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.591044 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} err="failed to get container status \"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\": rpc error: code = NotFound desc = could not find container \"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\": container with ID starting with 96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.591066 5014 scope.go:117] "RemoveContainer" containerID="3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.591311 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f"} err="failed to get container status \"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\": rpc error: code = NotFound desc = could not find container \"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\": container with ID starting with 
3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.591335 5014 scope.go:117] "RemoveContainer" containerID="6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.592055 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} err="failed to get container status \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": rpc error: code = NotFound desc = could not find container \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": container with ID starting with 6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.592074 5014 scope.go:117] "RemoveContainer" containerID="4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.592499 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5"} err="failed to get container status \"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\": rpc error: code = NotFound desc = could not find container \"4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5\": container with ID starting with 4bdda39c5eccb4367e6e52525ad55a330e020b3fa6033b228b8a756ba9be13e5 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.592534 5014 scope.go:117] "RemoveContainer" containerID="b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.593058 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3"} err="failed to get container status \"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\": rpc error: code = NotFound desc = could not find container \"b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3\": container with ID starting with b76e9f1e3b5e89c736432e6d67eee4bc3f52b9f5e38f59f1044aaa36f29eb0e3 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.593103 5014 scope.go:117] "RemoveContainer" containerID="36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.593450 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c"} err="failed to get container status \"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\": rpc error: code = NotFound desc = could not find container \"36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c\": container with ID starting with 36905ea84fbe977b96a4db358ee4306e2dd6925825dffdde561e7d2b30c7b43c not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.593478 5014 scope.go:117] "RemoveContainer" containerID="77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.593731 5014 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b"} err="failed to get container status \"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\": rpc error: code = NotFound desc = could not find container \"77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b\": container with ID starting with 77e6e70de575cbec40ceb2dfd7150cfc69fac480ac0a9c4e18ecd3258efd156b not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.593753 5014 scope.go:117] "RemoveContainer" containerID="63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.594077 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623"} err="failed to get container status \"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\": rpc error: code = NotFound desc = could not find container \"63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623\": container with ID starting with 63b15adfdda696cccdf431e312b29ecdb397f810dfc78a8d5f44592a0fd4f623 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.594096 5014 scope.go:117] "RemoveContainer" containerID="ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.594353 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f"} err="failed to get container status \"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\": rpc error: code = NotFound desc = could not find container \"ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f\": container with ID starting with ff5cbb967a6519d008ef28f3ca716afc138c5c3759a72b786df352fc948e9b8f not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.594374 5014 scope.go:117] "RemoveContainer" containerID="4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.594686 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00"} err="failed to get container status \"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\": rpc error: code = NotFound desc = could not find container \"4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00\": container with ID starting with 4355c2b72a7561e627ed2815626c495bd850c161695850390f29232ed6f29d00 not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.594722 5014 scope.go:117] "RemoveContainer" containerID="96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.595054 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536"} err="failed to get container status \"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\": rpc error: code = NotFound desc = could not find container \"96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536\": container with ID starting with 96366a9f81ec9993b1593b5f185509820bbc642bbfabcad7f8ec0dcf3621d536 not found: ID does not exist" Dec 
05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.595082 5014 scope.go:117] "RemoveContainer" containerID="3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.595437 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f"} err="failed to get container status \"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\": rpc error: code = NotFound desc = could not find container \"3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f\": container with ID starting with 3b6468ca7e8c3b4129205f48e9e0ae35f77632d22223c54f0b31e2c38ba2b09f not found: ID does not exist" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.595471 5014 scope.go:117] "RemoveContainer" containerID="6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170" Dec 05 11:00:40 crc kubenswrapper[5014]: I1205 11:00:40.595766 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170"} err="failed to get container status \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": rpc error: code = NotFound desc = could not find container \"6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170\": container with ID starting with 6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170 not found: ID does not exist" Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.331664 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41fb1a99-1c51-4281-b73f-8a29357a0a2c" path="/var/lib/kubelet/pods/41fb1a99-1c51-4281-b73f-8a29357a0a2c/volumes" Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.340960 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerStarted","Data":"eb9d7564e532d722deb806997399030ac451f204827d81c2562f9f87f5de917d"} Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.341029 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerStarted","Data":"c3a0213cfa6d2bbc45359fb925f1db5454a50d1be8bed234c5047e34444bccf8"} Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.341057 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerStarted","Data":"7d59e76c8ec319a414543ab9d866b4ebf0225092c6612650acdf6774926b043a"} Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.341082 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerStarted","Data":"71b27394fba3423a3917dbd5e07378500e28ec5e67672829dac4ad7485e65ef1"} Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.341103 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerStarted","Data":"1cd121c57a9c876d319cc8c08858349209334978ad01911a7f3634a8813a7e7f"} Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.341126 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
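The paired log.go:32 / pod_container_deletor.go:53 entries above come from the kubelet asking the runtime for the status of containers CRI-O has already deleted during the removal of ovnkube-node-znfbl; the gRPC NotFound code is what makes the repeated cleanup passes harmless. A minimal sketch of the same idempotent check, assuming the CRI-O socket path and the k8s.io/cri-api v1 client (illustrative only, not the kubelet's own code):

    // Treat gRPC NotFound from CRI ContainerStatus as "already removed",
    // mirroring the NotFound errors logged above.
    package main

    import (
        "context"
        "fmt"
        "time"

        "google.golang.org/grpc"
        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/credentials/insecure"
        "google.golang.org/grpc/status"
        runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
    )

    // containerGone reports whether the runtime no longer knows the container.
    func containerGone(ctx context.Context, rt runtimeapi.RuntimeServiceClient, id string) (bool, error) {
        _, err := rt.ContainerStatus(ctx, &runtimeapi.ContainerStatusRequest{ContainerId: id})
        if status.Code(err) == codes.NotFound {
            return true, nil // already deleted by the runtime; nothing left to do
        }
        return false, err // nil err means the container still exists
    }

    func main() {
        // The socket path is an assumption for a CRI-O host like this one.
        conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
            grpc.WithTransportCredentials(insecure.NewCredentials()))
        if err != nil {
            panic(err)
        }
        defer conn.Close()
        ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
        defer cancel()
        gone, err := containerGone(ctx, runtimeapi.NewRuntimeServiceClient(conn),
            "6e3998c166840d3b60127dc52cc51c19a23f518f8b520bd2e133d2d0ad2f2170")
        fmt.Println(gone, err)
    }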
pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerStarted","Data":"5f8f4f97def0ce1d7d65702cf344705dcde8cc8358eeec7ec5cf7e6400fdbd27"} Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.345001 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/2.log" Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.345532 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/1.log" Dec 05 11:00:41 crc kubenswrapper[5014]: I1205 11:00:41.345569 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-424mc" event={"ID":"f8198e15-3b7a-4c40-b4b3-63382eba5846","Type":"ContainerStarted","Data":"196e8c956a012eb99e3f841641896078b73da5d1b23e759c8df1568686e6ef42"} Dec 05 11:00:43 crc kubenswrapper[5014]: I1205 11:00:43.361128 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerStarted","Data":"c840badb17a7a01eecded29a2bd8978b6886a7d0cceef13c303be944a9b49a9e"} Dec 05 11:00:46 crc kubenswrapper[5014]: I1205 11:00:46.388976 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" event={"ID":"7ca37ce3-a0d9-4d44-b3ce-f803e06e587d","Type":"ContainerStarted","Data":"6c5dc7a20264d8b839462dccddc38a662a6e69898d60eab48a56bf3737aafede"} Dec 05 11:00:46 crc kubenswrapper[5014]: I1205 11:00:46.389497 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:46 crc kubenswrapper[5014]: I1205 11:00:46.389510 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:46 crc kubenswrapper[5014]: I1205 11:00:46.389519 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:46 crc kubenswrapper[5014]: I1205 11:00:46.415941 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" podStartSLOduration=7.415923274 podStartE2EDuration="7.415923274s" podCreationTimestamp="2025-12-05 11:00:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:00:46.413360942 +0000 UTC m=+773.361478666" watchObservedRunningTime="2025-12-05 11:00:46.415923274 +0000 UTC m=+773.364040978" Dec 05 11:00:46 crc kubenswrapper[5014]: I1205 11:00:46.420224 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:46 crc kubenswrapper[5014]: I1205 11:00:46.421192 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn" Dec 05 11:00:53 crc kubenswrapper[5014]: I1205 11:00:53.690462 5014 scope.go:117] "RemoveContainer" containerID="f239c80247ae55cdb52a1e2c2dedc9c460c294a6027028292ad8b8a0ac3be0bd" Dec 05 11:00:54 crc kubenswrapper[5014]: I1205 11:00:54.441254 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-424mc_f8198e15-3b7a-4c40-b4b3-63382eba5846/kube-multus/2.log" Dec 05 11:01:02 crc kubenswrapper[5014]: I1205 11:01:02.936364 5014 patch_prober.go:28] 
Dec 05 11:01:02 crc kubenswrapper[5014]: I1205 11:01:02.936364 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:01:02 crc kubenswrapper[5014]: I1205 11:01:02.936928 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:01:10 crc kubenswrapper[5014]: I1205 11:01:09.997433 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-h7vfn"
Dec 05 11:01:23 crc kubenswrapper[5014]: I1205 11:01:23.910858 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4"]
Dec 05 11:01:23 crc kubenswrapper[5014]: I1205 11:01:23.913611 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4"
Dec 05 11:01:23 crc kubenswrapper[5014]: I1205 11:01:23.916843 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 05 11:01:23 crc kubenswrapper[5014]: I1205 11:01:23.922043 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4"]
Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.023130 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vggn\" (UniqueName: \"kubernetes.io/projected/aea811f6-366f-49c8-853b-ae13ea1a6e2d-kube-api-access-8vggn\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4"
Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.023230 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4"
Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.023409 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4"
Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.124958 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4"
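The Liveness failures for machine-config-daemon-cvtv5 above are the kubelet's HTTP prober getting a refused connection on the endpoint named in the log; a refused socket surfaces as a probe failure before any HTTP status code is involved. A minimal sketch of an equivalent check (the port and path come straight from the log output; this is an illustration, not the kubelet's prober):

    // Perform the same GET the HTTP liveness prober issues for the
    // failing machine-config-daemon probe above.
    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get("http://127.0.0.1:8798/health")
        if err != nil {
            fmt.Println("probe failure:", err) // e.g. "connect: connection refused"
            return
        }
        defer resp.Body.Close()
        fmt.Println("probe result:", resp.Status) // a 2xx/3xx status counts as success
    }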
pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.125033 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.125071 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vggn\" (UniqueName: \"kubernetes.io/projected/aea811f6-366f-49c8-853b-ae13ea1a6e2d-kube-api-access-8vggn\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.125702 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.125776 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.150348 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vggn\" (UniqueName: \"kubernetes.io/projected/aea811f6-366f-49c8-853b-ae13ea1a6e2d-kube-api-access-8vggn\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.230781 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" Dec 05 11:01:24 crc kubenswrapper[5014]: I1205 11:01:24.486691 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4"] Dec 05 11:01:25 crc kubenswrapper[5014]: I1205 11:01:25.458905 5014 generic.go:334] "Generic (PLEG): container finished" podID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerID="c5ec57aad370cbd773a79e9ce828bc8dceeeb1cc817f2d1d3c070f54d1774722" exitCode=0 Dec 05 11:01:25 crc kubenswrapper[5014]: I1205 11:01:25.459033 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" event={"ID":"aea811f6-366f-49c8-853b-ae13ea1a6e2d","Type":"ContainerDied","Data":"c5ec57aad370cbd773a79e9ce828bc8dceeeb1cc817f2d1d3c070f54d1774722"} Dec 05 11:01:25 crc kubenswrapper[5014]: I1205 11:01:25.459552 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" event={"ID":"aea811f6-366f-49c8-853b-ae13ea1a6e2d","Type":"ContainerStarted","Data":"ddb04efaa05ee7b6ae7ec21149c0eb3d4c77644a6135a3bdbe5ab5c46917d113"} Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.261631 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hw27t"] Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.263513 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.283157 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hw27t"] Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.368383 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-catalog-content\") pod \"redhat-operators-hw27t\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.368514 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-utilities\") pod \"redhat-operators-hw27t\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.368566 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqlh4\" (UniqueName: \"kubernetes.io/projected/5f3d9418-c338-4e09-a522-2b12f5701194-kube-api-access-vqlh4\") pod \"redhat-operators-hw27t\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.470122 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqlh4\" (UniqueName: \"kubernetes.io/projected/5f3d9418-c338-4e09-a522-2b12f5701194-kube-api-access-vqlh4\") pod \"redhat-operators-hw27t\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.470731 5014 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-catalog-content\") pod \"redhat-operators-hw27t\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.470976 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-utilities\") pod \"redhat-operators-hw27t\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.471386 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-catalog-content\") pod \"redhat-operators-hw27t\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.471588 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-utilities\") pod \"redhat-operators-hw27t\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.503741 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqlh4\" (UniqueName: \"kubernetes.io/projected/5f3d9418-c338-4e09-a522-2b12f5701194-kube-api-access-vqlh4\") pod \"redhat-operators-hw27t\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.591293 5014 util.go:30] "No sandbox for pod can be found. 
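The reconciler entries above follow the kubelet's volume sequence for the hw27t pod: VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded. For an emptyDir volume like "catalog-content", a successful SetUp amounts to a per-pod directory appearing under the /var/lib/kubelet/pods/<podUID>/volumes/<plugin>/<volume> layout seen elsewhere in this log. A minimal sketch locating that directory (the UID and volume name come from the entries above; the exact plugin directory name is an assumption based on that layout):

    // Locate the on-disk directory backing an emptyDir volume once
    // MountVolume.SetUp has succeeded.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func emptyDirPath(podUID, volume string) string {
        return filepath.Join("/var/lib/kubelet/pods", podUID,
            "volumes", "kubernetes.io~empty-dir", volume)
    }

    func main() {
        p := emptyDirPath("5f3d9418-c338-4e09-a522-2b12f5701194", "catalog-content")
        if fi, err := os.Stat(p); err == nil && fi.IsDir() {
            fmt.Println("volume directory present:", p)
        } else {
            fmt.Println("not set up (or not on this host):", p)
        }
    }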
Need to start a new one" pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:26 crc kubenswrapper[5014]: I1205 11:01:26.823160 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hw27t"] Dec 05 11:01:26 crc kubenswrapper[5014]: W1205 11:01:26.834317 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f3d9418_c338_4e09_a522_2b12f5701194.slice/crio-27491dc2986cbdb12d00b7d23ef8e8970509f80748444c5060ee64a6aaf19649 WatchSource:0}: Error finding container 27491dc2986cbdb12d00b7d23ef8e8970509f80748444c5060ee64a6aaf19649: Status 404 returned error can't find the container with id 27491dc2986cbdb12d00b7d23ef8e8970509f80748444c5060ee64a6aaf19649 Dec 05 11:01:27 crc kubenswrapper[5014]: I1205 11:01:27.473790 5014 generic.go:334] "Generic (PLEG): container finished" podID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerID="988e0330ebf2adb9c6ca31a389aaea07a49f1dbbb09b48bd8ab30db0c1edbe97" exitCode=0 Dec 05 11:01:27 crc kubenswrapper[5014]: I1205 11:01:27.474482 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" event={"ID":"aea811f6-366f-49c8-853b-ae13ea1a6e2d","Type":"ContainerDied","Data":"988e0330ebf2adb9c6ca31a389aaea07a49f1dbbb09b48bd8ab30db0c1edbe97"} Dec 05 11:01:27 crc kubenswrapper[5014]: I1205 11:01:27.486549 5014 generic.go:334] "Generic (PLEG): container finished" podID="5f3d9418-c338-4e09-a522-2b12f5701194" containerID="17d5237827f596f36772d83c885c9e50d2190e259b582c08910624a4a9f2a873" exitCode=0 Dec 05 11:01:27 crc kubenswrapper[5014]: I1205 11:01:27.486633 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hw27t" event={"ID":"5f3d9418-c338-4e09-a522-2b12f5701194","Type":"ContainerDied","Data":"17d5237827f596f36772d83c885c9e50d2190e259b582c08910624a4a9f2a873"} Dec 05 11:01:27 crc kubenswrapper[5014]: I1205 11:01:27.486685 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hw27t" event={"ID":"5f3d9418-c338-4e09-a522-2b12f5701194","Type":"ContainerStarted","Data":"27491dc2986cbdb12d00b7d23ef8e8970509f80748444c5060ee64a6aaf19649"} Dec 05 11:01:28 crc kubenswrapper[5014]: I1205 11:01:28.493464 5014 generic.go:334] "Generic (PLEG): container finished" podID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerID="5571a84e9352dc19322dbbf8a21f65c5f0df5880f1ae2274ab5847ae58f0cc52" exitCode=0 Dec 05 11:01:28 crc kubenswrapper[5014]: I1205 11:01:28.493517 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" event={"ID":"aea811f6-366f-49c8-853b-ae13ea1a6e2d","Type":"ContainerDied","Data":"5571a84e9352dc19322dbbf8a21f65c5f0df5880f1ae2274ab5847ae58f0cc52"} Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.499898 5014 generic.go:334] "Generic (PLEG): container finished" podID="5f3d9418-c338-4e09-a522-2b12f5701194" containerID="4b4b383d2d9c9f391e05919ee129751e693e0873e3b9b4e09a6bb8008703619e" exitCode=0 Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.499948 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hw27t" event={"ID":"5f3d9418-c338-4e09-a522-2b12f5701194","Type":"ContainerDied","Data":"4b4b383d2d9c9f391e05919ee129751e693e0873e3b9b4e09a6bb8008703619e"} Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.766995 5014 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.924487 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-bundle\") pod \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.924963 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-util\") pod \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.925036 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vggn\" (UniqueName: \"kubernetes.io/projected/aea811f6-366f-49c8-853b-ae13ea1a6e2d-kube-api-access-8vggn\") pod \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\" (UID: \"aea811f6-366f-49c8-853b-ae13ea1a6e2d\") " Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.925213 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-bundle" (OuterVolumeSpecName: "bundle") pod "aea811f6-366f-49c8-853b-ae13ea1a6e2d" (UID: "aea811f6-366f-49c8-853b-ae13ea1a6e2d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.925361 5014 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.935668 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aea811f6-366f-49c8-853b-ae13ea1a6e2d-kube-api-access-8vggn" (OuterVolumeSpecName: "kube-api-access-8vggn") pod "aea811f6-366f-49c8-853b-ae13ea1a6e2d" (UID: "aea811f6-366f-49c8-853b-ae13ea1a6e2d"). InnerVolumeSpecName "kube-api-access-8vggn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:01:29 crc kubenswrapper[5014]: I1205 11:01:29.962528 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-util" (OuterVolumeSpecName: "util") pod "aea811f6-366f-49c8-853b-ae13ea1a6e2d" (UID: "aea811f6-366f-49c8-853b-ae13ea1a6e2d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:01:30 crc kubenswrapper[5014]: I1205 11:01:30.027068 5014 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aea811f6-366f-49c8-853b-ae13ea1a6e2d-util\") on node \"crc\" DevicePath \"\"" Dec 05 11:01:30 crc kubenswrapper[5014]: I1205 11:01:30.027106 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vggn\" (UniqueName: \"kubernetes.io/projected/aea811f6-366f-49c8-853b-ae13ea1a6e2d-kube-api-access-8vggn\") on node \"crc\" DevicePath \"\"" Dec 05 11:01:30 crc kubenswrapper[5014]: I1205 11:01:30.511594 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" event={"ID":"aea811f6-366f-49c8-853b-ae13ea1a6e2d","Type":"ContainerDied","Data":"ddb04efaa05ee7b6ae7ec21149c0eb3d4c77644a6135a3bdbe5ab5c46917d113"} Dec 05 11:01:30 crc kubenswrapper[5014]: I1205 11:01:30.511659 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddb04efaa05ee7b6ae7ec21149c0eb3d4c77644a6135a3bdbe5ab5c46917d113" Dec 05 11:01:30 crc kubenswrapper[5014]: I1205 11:01:30.511619 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4" Dec 05 11:01:30 crc kubenswrapper[5014]: I1205 11:01:30.514860 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hw27t" event={"ID":"5f3d9418-c338-4e09-a522-2b12f5701194","Type":"ContainerStarted","Data":"97ffcac415bb0f3bd04f8aaa47ef902d55ae86c6f32c4d62fb9dee983ec2d5b8"} Dec 05 11:01:30 crc kubenswrapper[5014]: I1205 11:01:30.534326 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hw27t" podStartSLOduration=2.091937854 podStartE2EDuration="4.53430502s" podCreationTimestamp="2025-12-05 11:01:26 +0000 UTC" firstStartedPulling="2025-12-05 11:01:27.500005389 +0000 UTC m=+814.448123103" lastFinishedPulling="2025-12-05 11:01:29.942372565 +0000 UTC m=+816.890490269" observedRunningTime="2025-12-05 11:01:30.532353663 +0000 UTC m=+817.480471387" watchObservedRunningTime="2025-12-05 11:01:30.53430502 +0000 UTC m=+817.482422724" Dec 05 11:01:32 crc kubenswrapper[5014]: I1205 11:01:32.937695 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:01:32 crc kubenswrapper[5014]: I1205 11:01:32.938384 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.390768 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5"] Dec 05 11:01:34 crc kubenswrapper[5014]: E1205 11:01:34.392650 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerName="extract" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.392685 5014 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerName="extract" Dec 05 11:01:34 crc kubenswrapper[5014]: E1205 11:01:34.392710 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerName="pull" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.392721 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerName="pull" Dec 05 11:01:34 crc kubenswrapper[5014]: E1205 11:01:34.392741 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerName="util" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.392750 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerName="util" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.392885 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="aea811f6-366f-49c8-853b-ae13ea1a6e2d" containerName="extract" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.393416 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.398779 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.398855 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.400227 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-ss4n8" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.409717 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5"] Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.486639 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfvvq\" (UniqueName: \"kubernetes.io/projected/8d7613b6-e062-416c-87e5-428a84a9d24f-kube-api-access-qfvvq\") pod \"nmstate-operator-5b5b58f5c8-vvdv5\" (UID: \"8d7613b6-e062-416c-87e5-428a84a9d24f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.588396 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfvvq\" (UniqueName: \"kubernetes.io/projected/8d7613b6-e062-416c-87e5-428a84a9d24f-kube-api-access-qfvvq\") pod \"nmstate-operator-5b5b58f5c8-vvdv5\" (UID: \"8d7613b6-e062-416c-87e5-428a84a9d24f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.614508 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfvvq\" (UniqueName: \"kubernetes.io/projected/8d7613b6-e062-416c-87e5-428a84a9d24f-kube-api-access-qfvvq\") pod \"nmstate-operator-5b5b58f5c8-vvdv5\" (UID: \"8d7613b6-e062-416c-87e5-428a84a9d24f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5" Dec 05 11:01:34 crc kubenswrapper[5014]: I1205 11:01:34.709537 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5" Dec 05 11:01:35 crc kubenswrapper[5014]: I1205 11:01:35.194694 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5"] Dec 05 11:01:35 crc kubenswrapper[5014]: I1205 11:01:35.540227 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5" event={"ID":"8d7613b6-e062-416c-87e5-428a84a9d24f","Type":"ContainerStarted","Data":"2cd0207c352afd15c0373435673008243742b5596cca5e52a181da9c352efe0f"} Dec 05 11:01:36 crc kubenswrapper[5014]: I1205 11:01:36.591898 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:36 crc kubenswrapper[5014]: I1205 11:01:36.592184 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:36 crc kubenswrapper[5014]: I1205 11:01:36.627980 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:37 crc kubenswrapper[5014]: I1205 11:01:37.600971 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:38 crc kubenswrapper[5014]: I1205 11:01:38.564921 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5" event={"ID":"8d7613b6-e062-416c-87e5-428a84a9d24f","Type":"ContainerStarted","Data":"7e7cabe1f30e9e2175615e04a1aac91ba4352575ccf875621cdad8eee60d63bf"} Dec 05 11:01:38 crc kubenswrapper[5014]: I1205 11:01:38.585828 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vvdv5" podStartSLOduration=1.498091272 podStartE2EDuration="4.585809174s" podCreationTimestamp="2025-12-05 11:01:34 +0000 UTC" firstStartedPulling="2025-12-05 11:01:35.206803938 +0000 UTC m=+822.154921632" lastFinishedPulling="2025-12-05 11:01:38.29452183 +0000 UTC m=+825.242639534" observedRunningTime="2025-12-05 11:01:38.581658002 +0000 UTC m=+825.529775726" watchObservedRunningTime="2025-12-05 11:01:38.585809174 +0000 UTC m=+825.533926888" Dec 05 11:01:39 crc kubenswrapper[5014]: I1205 11:01:39.251128 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hw27t"] Dec 05 11:01:39 crc kubenswrapper[5014]: I1205 11:01:39.570436 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hw27t" podUID="5f3d9418-c338-4e09-a522-2b12f5701194" containerName="registry-server" containerID="cri-o://97ffcac415bb0f3bd04f8aaa47ef902d55ae86c6f32c4d62fb9dee983ec2d5b8" gracePeriod=2 Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.585830 5014 generic.go:334] "Generic (PLEG): container finished" podID="5f3d9418-c338-4e09-a522-2b12f5701194" containerID="97ffcac415bb0f3bd04f8aaa47ef902d55ae86c6f32c4d62fb9dee983ec2d5b8" exitCode=0 Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.585937 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hw27t" event={"ID":"5f3d9418-c338-4e09-a522-2b12f5701194","Type":"ContainerDied","Data":"97ffcac415bb0f3bd04f8aaa47ef902d55ae86c6f32c4d62fb9dee983ec2d5b8"} Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.742949 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.883111 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-utilities\") pod \"5f3d9418-c338-4e09-a522-2b12f5701194\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.883243 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-catalog-content\") pod \"5f3d9418-c338-4e09-a522-2b12f5701194\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.883435 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqlh4\" (UniqueName: \"kubernetes.io/projected/5f3d9418-c338-4e09-a522-2b12f5701194-kube-api-access-vqlh4\") pod \"5f3d9418-c338-4e09-a522-2b12f5701194\" (UID: \"5f3d9418-c338-4e09-a522-2b12f5701194\") " Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.885954 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-utilities" (OuterVolumeSpecName: "utilities") pod "5f3d9418-c338-4e09-a522-2b12f5701194" (UID: "5f3d9418-c338-4e09-a522-2b12f5701194"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.889868 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f3d9418-c338-4e09-a522-2b12f5701194-kube-api-access-vqlh4" (OuterVolumeSpecName: "kube-api-access-vqlh4") pod "5f3d9418-c338-4e09-a522-2b12f5701194" (UID: "5f3d9418-c338-4e09-a522-2b12f5701194"). InnerVolumeSpecName "kube-api-access-vqlh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.985989 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqlh4\" (UniqueName: \"kubernetes.io/projected/5f3d9418-c338-4e09-a522-2b12f5701194-kube-api-access-vqlh4\") on node \"crc\" DevicePath \"\"" Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.986035 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:01:41 crc kubenswrapper[5014]: I1205 11:01:41.987918 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f3d9418-c338-4e09-a522-2b12f5701194" (UID: "5f3d9418-c338-4e09-a522-2b12f5701194"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:01:42 crc kubenswrapper[5014]: I1205 11:01:42.086967 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f3d9418-c338-4e09-a522-2b12f5701194-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:01:42 crc kubenswrapper[5014]: I1205 11:01:42.597869 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hw27t" event={"ID":"5f3d9418-c338-4e09-a522-2b12f5701194","Type":"ContainerDied","Data":"27491dc2986cbdb12d00b7d23ef8e8970509f80748444c5060ee64a6aaf19649"} Dec 05 11:01:42 crc kubenswrapper[5014]: I1205 11:01:42.598058 5014 scope.go:117] "RemoveContainer" containerID="97ffcac415bb0f3bd04f8aaa47ef902d55ae86c6f32c4d62fb9dee983ec2d5b8" Dec 05 11:01:42 crc kubenswrapper[5014]: I1205 11:01:42.598935 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hw27t" Dec 05 11:01:42 crc kubenswrapper[5014]: I1205 11:01:42.623425 5014 scope.go:117] "RemoveContainer" containerID="4b4b383d2d9c9f391e05919ee129751e693e0873e3b9b4e09a6bb8008703619e" Dec 05 11:01:42 crc kubenswrapper[5014]: I1205 11:01:42.638897 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hw27t"] Dec 05 11:01:42 crc kubenswrapper[5014]: I1205 11:01:42.652333 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hw27t"] Dec 05 11:01:42 crc kubenswrapper[5014]: I1205 11:01:42.669210 5014 scope.go:117] "RemoveContainer" containerID="17d5237827f596f36772d83c885c9e50d2190e259b582c08910624a4a9f2a873" Dec 05 11:01:43 crc kubenswrapper[5014]: I1205 11:01:43.324699 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f3d9418-c338-4e09-a522-2b12f5701194" path="/var/lib/kubelet/pods/5f3d9418-c338-4e09-a522-2b12f5701194/volumes" Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.374072 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q"] Dec 05 11:01:44 crc kubenswrapper[5014]: E1205 11:01:44.374338 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f3d9418-c338-4e09-a522-2b12f5701194" containerName="extract-utilities" Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.374355 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3d9418-c338-4e09-a522-2b12f5701194" containerName="extract-utilities" Dec 05 11:01:44 crc kubenswrapper[5014]: E1205 11:01:44.374376 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f3d9418-c338-4e09-a522-2b12f5701194" containerName="registry-server" Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.374383 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3d9418-c338-4e09-a522-2b12f5701194" containerName="registry-server" Dec 05 11:01:44 crc kubenswrapper[5014]: E1205 11:01:44.374400 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f3d9418-c338-4e09-a522-2b12f5701194" containerName="extract-content" Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.374408 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3d9418-c338-4e09-a522-2b12f5701194" containerName="extract-content" Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.374528 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f3d9418-c338-4e09-a522-2b12f5701194" containerName="registry-server" Dec 05 11:01:44 
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.375292 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.380921 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-bsgv7"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.385898 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q"]
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.390237 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"]
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.390934 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.393135 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.407935 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"]
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.414220 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66bsh\" (UniqueName: \"kubernetes.io/projected/47da56e6-6794-48dc-a7e6-99e6b63ecf43-kube-api-access-66bsh\") pod \"nmstate-metrics-7f946cbc9-qkl7q\" (UID: \"47da56e6-6794-48dc-a7e6-99e6b63ecf43\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.418957 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-44tzw"]
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.423457 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.504289 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"]
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.504897 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.510581 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.510815 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.511000 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-fm76t"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.511842 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"]
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.515955 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f73b5791-5c0a-4c9f-a78f-9ed2615f4538-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-rnnzf\" (UID: \"f73b5791-5c0a-4c9f-a78f-9ed2615f4538\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.516023 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-dbus-socket\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.516040 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ll87\" (UniqueName: \"kubernetes.io/projected/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-kube-api-access-4ll87\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.516056 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-ovs-socket\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.516071 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd5fv\" (UniqueName: \"kubernetes.io/projected/f73b5791-5c0a-4c9f-a78f-9ed2615f4538-kube-api-access-bd5fv\") pod \"nmstate-webhook-5f6d4c5ccb-rnnzf\" (UID: \"f73b5791-5c0a-4c9f-a78f-9ed2615f4538\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.516092 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-nmstate-lock\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.516121 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66bsh\" (UniqueName: \"kubernetes.io/projected/47da56e6-6794-48dc-a7e6-99e6b63ecf43-kube-api-access-66bsh\") pod \"nmstate-metrics-7f946cbc9-qkl7q\" (UID: \"47da56e6-6794-48dc-a7e6-99e6b63ecf43\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.540717 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66bsh\" (UniqueName: \"kubernetes.io/projected/47da56e6-6794-48dc-a7e6-99e6b63ecf43-kube-api-access-66bsh\") pod \"nmstate-metrics-7f946cbc9-qkl7q\" (UID: \"47da56e6-6794-48dc-a7e6-99e6b63ecf43\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617034 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-dbus-socket\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617089 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ll87\" (UniqueName: \"kubernetes.io/projected/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-kube-api-access-4ll87\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617123 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-ovs-socket\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617144 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd5fv\" (UniqueName: \"kubernetes.io/projected/f73b5791-5c0a-4c9f-a78f-9ed2615f4538-kube-api-access-bd5fv\") pod \"nmstate-webhook-5f6d4c5ccb-rnnzf\" (UID: \"f73b5791-5c0a-4c9f-a78f-9ed2615f4538\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617173 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/043084bc-abeb-4cb7-bea1-7dae70ac655d-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617198 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-nmstate-lock\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617243 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f73b5791-5c0a-4c9f-a78f-9ed2615f4538-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-rnnzf\" (UID: \"f73b5791-5c0a-4c9f-a78f-9ed2615f4538\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617270 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kstgb\" (UniqueName: \"kubernetes.io/projected/043084bc-abeb-4cb7-bea1-7dae70ac655d-kube-api-access-kstgb\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617348 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/043084bc-abeb-4cb7-bea1-7dae70ac655d-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617467 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-dbus-socket\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617546 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-ovs-socket\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.617704 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-nmstate-lock\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.626892 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f73b5791-5c0a-4c9f-a78f-9ed2615f4538-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-rnnzf\" (UID: \"f73b5791-5c0a-4c9f-a78f-9ed2615f4538\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.653097 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ll87\" (UniqueName: \"kubernetes.io/projected/a9df2f02-fdb6-46dc-bd30-25b7b4a2d357-kube-api-access-4ll87\") pod \"nmstate-handler-44tzw\" (UID: \"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357\") " pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.672123 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd5fv\" (UniqueName: \"kubernetes.io/projected/f73b5791-5c0a-4c9f-a78f-9ed2615f4538-kube-api-access-bd5fv\") pod \"nmstate-webhook-5f6d4c5ccb-rnnzf\" (UID: \"f73b5791-5c0a-4c9f-a78f-9ed2615f4538\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.693812 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-56dd54f94b-9cwbv"]
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.694715 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.713807 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-56dd54f94b-9cwbv"]
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.719022 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kstgb\" (UniqueName: \"kubernetes.io/projected/043084bc-abeb-4cb7-bea1-7dae70ac655d-kube-api-access-kstgb\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.719085 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/043084bc-abeb-4cb7-bea1-7dae70ac655d-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.719158 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/043084bc-abeb-4cb7-bea1-7dae70ac655d-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:44 crc kubenswrapper[5014]: E1205 11:01:44.719312 5014 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found
Dec 05 11:01:44 crc kubenswrapper[5014]: E1205 11:01:44.719410 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/043084bc-abeb-4cb7-bea1-7dae70ac655d-plugin-serving-cert podName:043084bc-abeb-4cb7-bea1-7dae70ac655d nodeName:}" failed. No retries permitted until 2025-12-05 11:01:45.219391583 +0000 UTC m=+832.167509277 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/043084bc-abeb-4cb7-bea1-7dae70ac655d-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-fzxsv" (UID: "043084bc-abeb-4cb7-bea1-7dae70ac655d") : secret "plugin-serving-cert" not found
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.720057 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/043084bc-abeb-4cb7-bea1-7dae70ac655d-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.736872 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.737431 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kstgb\" (UniqueName: \"kubernetes.io/projected/043084bc-abeb-4cb7-bea1-7dae70ac655d-kube-api-access-kstgb\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
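The nestedpendingoperations.go:348 entry a few lines above shows the retry pacing for a failed mount: the plugin-serving-cert secret does not exist yet, so the operation is embargoed for durationBeforeRetry (500ms here) before the reconciler may try again; the secret is then cached and the mount succeeds at 11:01:45.233 below. A sketch of that backoff loop (illustrative, not kubelet source; only the 500ms initial delay is taken from the log, the doubling and the cap are assumptions):

// mountWithBackoff retries a mount, waiting durationBeforeRetry between
// attempts and growing it exponentially, as implied by the log entry above.
package main

import (
	"errors"
	"fmt"
	"time"
)

func mountWithBackoff(mount func() error, maxWait time.Duration) error {
	wait := 500 * time.Millisecond // initial durationBeforeRetry from the log
	for {
		err := mount()
		if err == nil {
			return nil
		}
		if wait > maxWait {
			return fmt.Errorf("giving up: %w", err)
		}
		fmt.Printf("no retries permitted for %v: %v\n", wait, err)
		time.Sleep(wait)
		wait *= 2 // assumed exponential growth between attempts
	}
}

func main() {
	attempts := 0
	err := mountWithBackoff(func() error {
		attempts++
		if attempts < 3 {
			return errors.New(`secret "plugin-serving-cert" not found`)
		}
		return nil // secret created meanwhile; mount succeeds, as at 11:01:45 below
	}, 10*time.Second)
	fmt.Println("result:", err)
}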
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.751896 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.761797 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.822706 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/586cf076-aed2-45c9-a74b-0f379c070775-console-oauth-config\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.822752 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-oauth-serving-cert\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.822782 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/586cf076-aed2-45c9-a74b-0f379c070775-console-serving-cert\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.822835 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-console-config\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.822895 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqmf9\" (UniqueName: \"kubernetes.io/projected/586cf076-aed2-45c9-a74b-0f379c070775-kube-api-access-tqmf9\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.822922 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-service-ca\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.822967 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-trusted-ca-bundle\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.924347 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/586cf076-aed2-45c9-a74b-0f379c070775-console-oauth-config\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.924408 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-oauth-serving-cert\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.924464 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/586cf076-aed2-45c9-a74b-0f379c070775-console-serving-cert\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.924492 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-console-config\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.924546 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqmf9\" (UniqueName: \"kubernetes.io/projected/586cf076-aed2-45c9-a74b-0f379c070775-kube-api-access-tqmf9\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.924563 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-service-ca\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.924614 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-trusted-ca-bundle\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.926245 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-console-config\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.926256 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-oauth-serving-cert\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.926635 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-trusted-ca-bundle\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.926718 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/586cf076-aed2-45c9-a74b-0f379c070775-service-ca\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.931167 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/586cf076-aed2-45c9-a74b-0f379c070775-console-serving-cert\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.932260 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/586cf076-aed2-45c9-a74b-0f379c070775-console-oauth-config\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.941859 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqmf9\" (UniqueName: \"kubernetes.io/projected/586cf076-aed2-45c9-a74b-0f379c070775-kube-api-access-tqmf9\") pod \"console-56dd54f94b-9cwbv\" (UID: \"586cf076-aed2-45c9-a74b-0f379c070775\") " pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:44 crc kubenswrapper[5014]: I1205 11:01:44.964074 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"]
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.012552 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-56dd54f94b-9cwbv"
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.018474 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q"]
Dec 05 11:01:45 crc kubenswrapper[5014]: W1205 11:01:45.024546 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47da56e6_6794_48dc_a7e6_99e6b63ecf43.slice/crio-4537ecef265281e8fa8eb002d3f0b94bf37effa5d5ade620f070117b9667e12e WatchSource:0}: Error finding container 4537ecef265281e8fa8eb002d3f0b94bf37effa5d5ade620f070117b9667e12e: Status 404 returned error can't find the container with id 4537ecef265281e8fa8eb002d3f0b94bf37effa5d5ade620f070117b9667e12e
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.228793 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/043084bc-abeb-4cb7-bea1-7dae70ac655d-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.233545 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/043084bc-abeb-4cb7-bea1-7dae70ac655d-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-fzxsv\" (UID: \"043084bc-abeb-4cb7-bea1-7dae70ac655d\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.431840 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.436337 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-56dd54f94b-9cwbv"]
Dec 05 11:01:45 crc kubenswrapper[5014]: W1205 11:01:45.438794 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod586cf076_aed2_45c9_a74b_0f379c070775.slice/crio-32d7f86df6353e866ecdaaf5265d55b13bffaa0c41b9d69637aa7aabdaaf3d31 WatchSource:0}: Error finding container 32d7f86df6353e866ecdaaf5265d55b13bffaa0c41b9d69637aa7aabdaaf3d31: Status 404 returned error can't find the container with id 32d7f86df6353e866ecdaaf5265d55b13bffaa0c41b9d69637aa7aabdaaf3d31
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.621442 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-56dd54f94b-9cwbv" event={"ID":"586cf076-aed2-45c9-a74b-0f379c070775","Type":"ContainerStarted","Data":"32d7f86df6353e866ecdaaf5265d55b13bffaa0c41b9d69637aa7aabdaaf3d31"}
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.622608 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q" event={"ID":"47da56e6-6794-48dc-a7e6-99e6b63ecf43","Type":"ContainerStarted","Data":"4537ecef265281e8fa8eb002d3f0b94bf37effa5d5ade620f070117b9667e12e"}
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.623478 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-44tzw" event={"ID":"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357","Type":"ContainerStarted","Data":"ffedf8927b1c625350399bfd9a70d2b6798c11810fdaab1a3c06a4ba1b8a7959"}
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.624217 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf" event={"ID":"f73b5791-5c0a-4c9f-a78f-9ed2615f4538","Type":"ContainerStarted","Data":"fd76e24ddc0a44d7b605b96389dfa84d5b9405c7ad8281e50006bd5d721e482c"}
Dec 05 11:01:45 crc kubenswrapper[5014]: I1205 11:01:45.669373 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv"]
Dec 05 11:01:45 crc kubenswrapper[5014]: W1205 11:01:45.674835 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod043084bc_abeb_4cb7_bea1_7dae70ac655d.slice/crio-6302c42eda985c1fbdfccfa8063d08b3fb6e646874bb92534d031c7df924d687 WatchSource:0}: Error finding container 6302c42eda985c1fbdfccfa8063d08b3fb6e646874bb92534d031c7df924d687: Status 404 returned error can't find the container with id 6302c42eda985c1fbdfccfa8063d08b3fb6e646874bb92534d031c7df924d687
Dec 05 11:01:46 crc kubenswrapper[5014]: I1205 11:01:46.632642 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv" event={"ID":"043084bc-abeb-4cb7-bea1-7dae70ac655d","Type":"ContainerStarted","Data":"6302c42eda985c1fbdfccfa8063d08b3fb6e646874bb92534d031c7df924d687"}
Dec 05 11:01:46 crc kubenswrapper[5014]: I1205 11:01:46.634103 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-56dd54f94b-9cwbv" event={"ID":"586cf076-aed2-45c9-a74b-0f379c070775","Type":"ContainerStarted","Data":"747d71a039615491722a4046e0c36cee0809db88b12e05b0aadfc25ae64b63a0"}
Dec 05 11:01:46 crc kubenswrapper[5014]: I1205 11:01:46.654677 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-56dd54f94b-9cwbv" podStartSLOduration=2.654651813 podStartE2EDuration="2.654651813s" podCreationTimestamp="2025-12-05 11:01:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:01:46.648877081 +0000 UTC m=+833.596994795" watchObservedRunningTime="2025-12-05 11:01:46.654651813 +0000 UTC m=+833.602769527"
Dec 05 11:01:47 crc kubenswrapper[5014]: I1205 11:01:47.642633 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q" event={"ID":"47da56e6-6794-48dc-a7e6-99e6b63ecf43","Type":"ContainerStarted","Data":"d59d19f8c5636e7abf1e65dfd74155e92ba33a104d57ce742cc696772a314f27"}
Dec 05 11:01:47 crc kubenswrapper[5014]: I1205 11:01:47.644548 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-44tzw" event={"ID":"a9df2f02-fdb6-46dc-bd30-25b7b4a2d357","Type":"ContainerStarted","Data":"0bafc49c0215e5de1c32c2468658f243c9cd3fe4375da30e13edf3b9a706b801"}
Dec 05 11:01:47 crc kubenswrapper[5014]: I1205 11:01:47.644683 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-44tzw"
Dec 05 11:01:47 crc kubenswrapper[5014]: I1205 11:01:47.646103 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf" event={"ID":"f73b5791-5c0a-4c9f-a78f-9ed2615f4538","Type":"ContainerStarted","Data":"251a9571fbc06d6a66b941410112b55d98a0ed1b279038d277221ddba712d699"}
Dec 05 11:01:47 crc kubenswrapper[5014]: I1205 11:01:47.646425 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf"
Dec 05 11:01:47 crc kubenswrapper[5014]: I1205 11:01:47.658823 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-44tzw" podStartSLOduration=1.183532005 podStartE2EDuration="3.658801178s" podCreationTimestamp="2025-12-05 11:01:44 +0000 UTC" firstStartedPulling="2025-12-05 11:01:44.788643001 +0000 UTC m=+831.736760705" lastFinishedPulling="2025-12-05 11:01:47.263912154 +0000 UTC m=+834.212029878" observedRunningTime="2025-12-05 11:01:47.656106282 +0000 UTC m=+834.604224016" watchObservedRunningTime="2025-12-05 11:01:47.658801178 +0000 UTC m=+834.606918882"
Dec 05 11:01:47 crc kubenswrapper[5014]: I1205 11:01:47.678218 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf" podStartSLOduration=1.374784146 podStartE2EDuration="3.678190074s" podCreationTimestamp="2025-12-05 11:01:44 +0000 UTC" firstStartedPulling="2025-12-05 11:01:44.976690652 +0000 UTC m=+831.924808366" lastFinishedPulling="2025-12-05 11:01:47.28009657 +0000 UTC m=+834.228214294" observedRunningTime="2025-12-05 11:01:47.668003343 +0000 UTC m=+834.616121077" watchObservedRunningTime="2025-12-05 11:01:47.678190074 +0000 UTC m=+834.626307778"
Dec 05 11:01:48 crc kubenswrapper[5014]: I1205 11:01:48.652132 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv" event={"ID":"043084bc-abeb-4cb7-bea1-7dae70ac655d","Type":"ContainerStarted","Data":"4c4eef874c500bf3a9711b4af8bd511e70c6ccbbfa0263370b597d5aa383ca12"}
Dec 05 11:01:48 crc kubenswrapper[5014]: I1205 11:01:48.676995 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-fzxsv" podStartSLOduration=2.06572696 podStartE2EDuration="4.676958497s" podCreationTimestamp="2025-12-05 11:01:44 +0000 UTC" firstStartedPulling="2025-12-05 11:01:45.677725905 +0000 UTC m=+832.625843609" lastFinishedPulling="2025-12-05 11:01:48.288957442 +0000 UTC m=+835.237075146" observedRunningTime="2025-12-05 11:01:48.666928041 +0000 UTC m=+835.615045775" watchObservedRunningTime="2025-12-05 11:01:48.676958497 +0000 UTC m=+835.625076201"
Dec 05 11:01:49 crc kubenswrapper[5014]: I1205 11:01:49.659587 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q" event={"ID":"47da56e6-6794-48dc-a7e6-99e6b63ecf43","Type":"ContainerStarted","Data":"ef5bee853fea83e7142159b7843cd186fc14176514b7b7d0738c34646d05996c"}
Dec 05 11:01:49 crc kubenswrapper[5014]: I1205 11:01:49.677681 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qkl7q" podStartSLOduration=1.453694321 podStartE2EDuration="5.677660678s" podCreationTimestamp="2025-12-05 11:01:44 +0000 UTC" firstStartedPulling="2025-12-05 11:01:45.028248887 +0000 UTC m=+831.976366591" lastFinishedPulling="2025-12-05 11:01:49.252215244 +0000 UTC m=+836.200332948" observedRunningTime="2025-12-05 11:01:49.672226385 +0000 UTC m=+836.620344129" watchObservedRunningTime="2025-12-05 11:01:49.677660678 +0000 UTC m=+836.625778382"
Dec 05 11:01:54 crc kubenswrapper[5014]: I1205 11:01:54.802504 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-44tzw"
status="unhealthy" pod="openshift-console/console-56dd54f94b-9cwbv" Dec 05 11:01:55 crc kubenswrapper[5014]: I1205 11:01:55.014343 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-56dd54f94b-9cwbv" Dec 05 11:01:55 crc kubenswrapper[5014]: I1205 11:01:55.019327 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-56dd54f94b-9cwbv" Dec 05 11:01:55 crc kubenswrapper[5014]: I1205 11:01:55.705442 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-56dd54f94b-9cwbv" Dec 05 11:01:55 crc kubenswrapper[5014]: I1205 11:01:55.775343 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-bzvs8"] Dec 05 11:02:02 crc kubenswrapper[5014]: I1205 11:02:02.936661 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:02:02 crc kubenswrapper[5014]: I1205 11:02:02.937254 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:02:02 crc kubenswrapper[5014]: I1205 11:02:02.937355 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 11:02:02 crc kubenswrapper[5014]: I1205 11:02:02.938208 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"38dc139c6b157093aa0187abc2a47c8fff469ab971f15976ee0dbc61fa5a9ede"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:02:02 crc kubenswrapper[5014]: I1205 11:02:02.938321 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://38dc139c6b157093aa0187abc2a47c8fff469ab971f15976ee0dbc61fa5a9ede" gracePeriod=600 Dec 05 11:02:03 crc kubenswrapper[5014]: I1205 11:02:03.754045 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="38dc139c6b157093aa0187abc2a47c8fff469ab971f15976ee0dbc61fa5a9ede" exitCode=0 Dec 05 11:02:03 crc kubenswrapper[5014]: I1205 11:02:03.754162 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"38dc139c6b157093aa0187abc2a47c8fff469ab971f15976ee0dbc61fa5a9ede"} Dec 05 11:02:03 crc kubenswrapper[5014]: I1205 11:02:03.754527 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"4ddf8f910e52a088784fd2d469973cf4512542c6f65d5608f61ef0af3d2944f1"} Dec 05 11:02:03 crc kubenswrapper[5014]: I1205 
11:02:03.754555 5014 scope.go:117] "RemoveContainer" containerID="d0d4a87cec920d20a9a11a5c4acd6b4532272ee40a4586c95d621bc0d1e41f59" Dec 05 11:02:04 crc kubenswrapper[5014]: I1205 11:02:04.760500 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-rnnzf" Dec 05 11:02:20 crc kubenswrapper[5014]: I1205 11:02:20.830345 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-bzvs8" podUID="1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" containerName="console" containerID="cri-o://ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17" gracePeriod=15 Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.187765 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-bzvs8_1dd3feb2-7ce4-46d3-9e9f-c329afde30e8/console/0.log" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.187826 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.299817 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb"] Dec 05 11:02:21 crc kubenswrapper[5014]: E1205 11:02:21.300031 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" containerName="console" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.300042 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" containerName="console" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.300137 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" containerName="console" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.301080 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.302837 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.307411 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-serving-cert\") pod \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.307447 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdvm8\" (UniqueName: \"kubernetes.io/projected/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-kube-api-access-cdvm8\") pod \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.307476 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-config\") pod \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.307524 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-oauth-config\") pod \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.307582 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-oauth-serving-cert\") pod \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.307598 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-trusted-ca-bundle\") pod \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.307630 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-service-ca\") pod \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\" (UID: \"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8\") " Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.308406 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" (UID: "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.308435 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-service-ca" (OuterVolumeSpecName: "service-ca") pod "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" (UID: "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.308913 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-config" (OuterVolumeSpecName: "console-config") pod "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" (UID: "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.309010 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" (UID: "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.318121 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" (UID: "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.319713 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" (UID: "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.319751 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-kube-api-access-cdvm8" (OuterVolumeSpecName: "kube-api-access-cdvm8") pod "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" (UID: "1dd3feb2-7ce4-46d3-9e9f-c329afde30e8"). InnerVolumeSpecName "kube-api-access-cdvm8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.319812 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb"] Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.409791 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.409848 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnxlc\" (UniqueName: \"kubernetes.io/projected/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-kube-api-access-cnxlc\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.409904 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.410046 5014 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.410060 5014 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.410071 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdvm8\" (UniqueName: \"kubernetes.io/projected/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-kube-api-access-cdvm8\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.410079 5014 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.410087 5014 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.410096 5014 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.410106 5014 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.512063 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.512176 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnxlc\" (UniqueName: \"kubernetes.io/projected/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-kube-api-access-cnxlc\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.512248 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.513027 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.513346 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.530645 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnxlc\" (UniqueName: \"kubernetes.io/projected/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-kube-api-access-cnxlc\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.616691 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.806211 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb"] Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.895190 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-bzvs8_1dd3feb2-7ce4-46d3-9e9f-c329afde30e8/console/0.log" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.895573 5014 generic.go:334] "Generic (PLEG): container finished" podID="1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" containerID="ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17" exitCode=2 Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.895654 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bzvs8" event={"ID":"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8","Type":"ContainerDied","Data":"ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17"} Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.895677 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-bzvs8" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.895714 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bzvs8" event={"ID":"1dd3feb2-7ce4-46d3-9e9f-c329afde30e8","Type":"ContainerDied","Data":"9ee9111047f3b099da6ec45342e569d733536b4dc3b09004a78f00dbea4c0620"} Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.895740 5014 scope.go:117] "RemoveContainer" containerID="ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.901031 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" event={"ID":"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5","Type":"ContainerStarted","Data":"be23bf338a45bc6ccd589df1bedaa81777f4119aef3f72085c730901a4eb2ee4"} Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.923621 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-bzvs8"] Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.925029 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-bzvs8"] Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.932464 5014 scope.go:117] "RemoveContainer" containerID="ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17" Dec 05 11:02:21 crc kubenswrapper[5014]: E1205 11:02:21.932905 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17\": container with ID starting with ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17 not found: ID does not exist" containerID="ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17" Dec 05 11:02:21 crc kubenswrapper[5014]: I1205 11:02:21.932941 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17"} err="failed to get container status \"ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17\": rpc error: code = NotFound desc = could not find container 
\"ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17\": container with ID starting with ca07308121cf3712bee2a8bfceaa13bd6d89c6d8d66d711bace6ad18a782ad17 not found: ID does not exist" Dec 05 11:02:22 crc kubenswrapper[5014]: I1205 11:02:22.911265 5014 generic.go:334] "Generic (PLEG): container finished" podID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerID="92ad19e37de696d804ee95604158795dd74974c74add4aa0088447a099668ecd" exitCode=0 Dec 05 11:02:22 crc kubenswrapper[5014]: I1205 11:02:22.911330 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" event={"ID":"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5","Type":"ContainerDied","Data":"92ad19e37de696d804ee95604158795dd74974c74add4aa0088447a099668ecd"} Dec 05 11:02:23 crc kubenswrapper[5014]: I1205 11:02:23.325767 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1dd3feb2-7ce4-46d3-9e9f-c329afde30e8" path="/var/lib/kubelet/pods/1dd3feb2-7ce4-46d3-9e9f-c329afde30e8/volumes" Dec 05 11:02:24 crc kubenswrapper[5014]: I1205 11:02:24.923339 5014 generic.go:334] "Generic (PLEG): container finished" podID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerID="0e92e23bf4698a2a9f109931d2cbdfe9baf122cf471c7de28ac76c1a363bdf30" exitCode=0 Dec 05 11:02:24 crc kubenswrapper[5014]: I1205 11:02:24.923406 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" event={"ID":"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5","Type":"ContainerDied","Data":"0e92e23bf4698a2a9f109931d2cbdfe9baf122cf471c7de28ac76c1a363bdf30"} Dec 05 11:02:25 crc kubenswrapper[5014]: I1205 11:02:25.931925 5014 generic.go:334] "Generic (PLEG): container finished" podID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerID="f123ad73311205cbcf4845626313c09af63aca47a45a16671eafdee7d3e049e3" exitCode=0 Dec 05 11:02:25 crc kubenswrapper[5014]: I1205 11:02:25.931971 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" event={"ID":"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5","Type":"ContainerDied","Data":"f123ad73311205cbcf4845626313c09af63aca47a45a16671eafdee7d3e049e3"} Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.184745 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.382055 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-bundle\") pod \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.382174 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-util\") pod \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.382257 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnxlc\" (UniqueName: \"kubernetes.io/projected/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-kube-api-access-cnxlc\") pod \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\" (UID: \"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5\") " Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.383582 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-bundle" (OuterVolumeSpecName: "bundle") pod "9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" (UID: "9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.388827 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-kube-api-access-cnxlc" (OuterVolumeSpecName: "kube-api-access-cnxlc") pod "9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" (UID: "9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5"). InnerVolumeSpecName "kube-api-access-cnxlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.484802 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnxlc\" (UniqueName: \"kubernetes.io/projected/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-kube-api-access-cnxlc\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.484843 5014 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.571801 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-util" (OuterVolumeSpecName: "util") pod "9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" (UID: "9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.586417 5014 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5-util\") on node \"crc\" DevicePath \"\"" Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.944557 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" event={"ID":"9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5","Type":"ContainerDied","Data":"be23bf338a45bc6ccd589df1bedaa81777f4119aef3f72085c730901a4eb2ee4"} Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.944601 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be23bf338a45bc6ccd589df1bedaa81777f4119aef3f72085c730901a4eb2ee4" Dec 05 11:02:27 crc kubenswrapper[5014]: I1205 11:02:27.944617 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.393340 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8"] Dec 05 11:02:39 crc kubenswrapper[5014]: E1205 11:02:39.394142 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerName="extract" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.394156 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerName="extract" Dec 05 11:02:39 crc kubenswrapper[5014]: E1205 11:02:39.394175 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerName="util" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.394182 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerName="util" Dec 05 11:02:39 crc kubenswrapper[5014]: E1205 11:02:39.394192 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerName="pull" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.394200 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerName="pull" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.394323 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5" containerName="extract" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.394756 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.397210 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.397354 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.398259 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.398564 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-77t57" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.398613 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.413575 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8"] Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.586046 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2a78d9b2-16fa-4586-86cf-96397edefe00-apiservice-cert\") pod \"metallb-operator-controller-manager-54df4ff95d-j68c8\" (UID: \"2a78d9b2-16fa-4586-86cf-96397edefe00\") " pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.586445 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2a78d9b2-16fa-4586-86cf-96397edefe00-webhook-cert\") pod \"metallb-operator-controller-manager-54df4ff95d-j68c8\" (UID: \"2a78d9b2-16fa-4586-86cf-96397edefe00\") " pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.586548 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc9vg\" (UniqueName: \"kubernetes.io/projected/2a78d9b2-16fa-4586-86cf-96397edefe00-kube-api-access-qc9vg\") pod \"metallb-operator-controller-manager-54df4ff95d-j68c8\" (UID: \"2a78d9b2-16fa-4586-86cf-96397edefe00\") " pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.687717 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2a78d9b2-16fa-4586-86cf-96397edefe00-webhook-cert\") pod \"metallb-operator-controller-manager-54df4ff95d-j68c8\" (UID: \"2a78d9b2-16fa-4586-86cf-96397edefe00\") " pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.688046 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc9vg\" (UniqueName: \"kubernetes.io/projected/2a78d9b2-16fa-4586-86cf-96397edefe00-kube-api-access-qc9vg\") pod \"metallb-operator-controller-manager-54df4ff95d-j68c8\" (UID: \"2a78d9b2-16fa-4586-86cf-96397edefe00\") " pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.688097 5014 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2a78d9b2-16fa-4586-86cf-96397edefe00-apiservice-cert\") pod \"metallb-operator-controller-manager-54df4ff95d-j68c8\" (UID: \"2a78d9b2-16fa-4586-86cf-96397edefe00\") " pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.693924 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2a78d9b2-16fa-4586-86cf-96397edefe00-apiservice-cert\") pod \"metallb-operator-controller-manager-54df4ff95d-j68c8\" (UID: \"2a78d9b2-16fa-4586-86cf-96397edefe00\") " pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.694034 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2a78d9b2-16fa-4586-86cf-96397edefe00-webhook-cert\") pod \"metallb-operator-controller-manager-54df4ff95d-j68c8\" (UID: \"2a78d9b2-16fa-4586-86cf-96397edefe00\") " pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.721649 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc9vg\" (UniqueName: \"kubernetes.io/projected/2a78d9b2-16fa-4586-86cf-96397edefe00-kube-api-access-qc9vg\") pod \"metallb-operator-controller-manager-54df4ff95d-j68c8\" (UID: \"2a78d9b2-16fa-4586-86cf-96397edefe00\") " pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.817558 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r"] Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.818358 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.821348 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-w4v75" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.821487 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.821691 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.846432 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r"] Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.894642 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb726700-5715-4a97-92c4-f8a50a0922bb-webhook-cert\") pod \"metallb-operator-webhook-server-5f44fbc487-54m6r\" (UID: \"bb726700-5715-4a97-92c4-f8a50a0922bb\") " pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.894722 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6zdm\" (UniqueName: \"kubernetes.io/projected/bb726700-5715-4a97-92c4-f8a50a0922bb-kube-api-access-w6zdm\") pod \"metallb-operator-webhook-server-5f44fbc487-54m6r\" (UID: \"bb726700-5715-4a97-92c4-f8a50a0922bb\") " pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.894763 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb726700-5715-4a97-92c4-f8a50a0922bb-apiservice-cert\") pod \"metallb-operator-webhook-server-5f44fbc487-54m6r\" (UID: \"bb726700-5715-4a97-92c4-f8a50a0922bb\") " pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.995446 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb726700-5715-4a97-92c4-f8a50a0922bb-apiservice-cert\") pod \"metallb-operator-webhook-server-5f44fbc487-54m6r\" (UID: \"bb726700-5715-4a97-92c4-f8a50a0922bb\") " pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.995558 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb726700-5715-4a97-92c4-f8a50a0922bb-webhook-cert\") pod \"metallb-operator-webhook-server-5f44fbc487-54m6r\" (UID: \"bb726700-5715-4a97-92c4-f8a50a0922bb\") " pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:39 crc kubenswrapper[5014]: I1205 11:02:39.995598 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6zdm\" (UniqueName: \"kubernetes.io/projected/bb726700-5715-4a97-92c4-f8a50a0922bb-kube-api-access-w6zdm\") pod \"metallb-operator-webhook-server-5f44fbc487-54m6r\" (UID: \"bb726700-5715-4a97-92c4-f8a50a0922bb\") " pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:40 crc kubenswrapper[5014]: I1205 
11:02:40.003078 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb726700-5715-4a97-92c4-f8a50a0922bb-webhook-cert\") pod \"metallb-operator-webhook-server-5f44fbc487-54m6r\" (UID: \"bb726700-5715-4a97-92c4-f8a50a0922bb\") " pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:40 crc kubenswrapper[5014]: I1205 11:02:40.003098 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb726700-5715-4a97-92c4-f8a50a0922bb-apiservice-cert\") pod \"metallb-operator-webhook-server-5f44fbc487-54m6r\" (UID: \"bb726700-5715-4a97-92c4-f8a50a0922bb\") " pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:40 crc kubenswrapper[5014]: I1205 11:02:40.012198 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:40 crc kubenswrapper[5014]: I1205 11:02:40.037331 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6zdm\" (UniqueName: \"kubernetes.io/projected/bb726700-5715-4a97-92c4-f8a50a0922bb-kube-api-access-w6zdm\") pod \"metallb-operator-webhook-server-5f44fbc487-54m6r\" (UID: \"bb726700-5715-4a97-92c4-f8a50a0922bb\") " pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:40 crc kubenswrapper[5014]: I1205 11:02:40.132984 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:40 crc kubenswrapper[5014]: I1205 11:02:40.503402 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8"] Dec 05 11:02:40 crc kubenswrapper[5014]: I1205 11:02:40.616005 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r"] Dec 05 11:02:40 crc kubenswrapper[5014]: W1205 11:02:40.622584 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb726700_5715_4a97_92c4_f8a50a0922bb.slice/crio-709d08ab6c0b59ee1429d165c77e5e70bcb291d4dd300d26bbaee6d450f98c4b WatchSource:0}: Error finding container 709d08ab6c0b59ee1429d165c77e5e70bcb291d4dd300d26bbaee6d450f98c4b: Status 404 returned error can't find the container with id 709d08ab6c0b59ee1429d165c77e5e70bcb291d4dd300d26bbaee6d450f98c4b Dec 05 11:02:41 crc kubenswrapper[5014]: I1205 11:02:41.023245 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" event={"ID":"bb726700-5715-4a97-92c4-f8a50a0922bb","Type":"ContainerStarted","Data":"709d08ab6c0b59ee1429d165c77e5e70bcb291d4dd300d26bbaee6d450f98c4b"} Dec 05 11:02:41 crc kubenswrapper[5014]: I1205 11:02:41.024379 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" event={"ID":"2a78d9b2-16fa-4586-86cf-96397edefe00","Type":"ContainerStarted","Data":"8c43b89bbab2ca0a7a967f3d07eeb3811fbde3c6dc2e5e8abfa6698cfac062a0"} Dec 05 11:02:48 crc kubenswrapper[5014]: I1205 11:02:48.102405 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" 
event={"ID":"2a78d9b2-16fa-4586-86cf-96397edefe00","Type":"ContainerStarted","Data":"6894910ad8c0c23509fde2164dc32ab4af52350dd62616e829dc76ff9ef1a95d"} Dec 05 11:02:48 crc kubenswrapper[5014]: I1205 11:02:48.103715 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:02:48 crc kubenswrapper[5014]: I1205 11:02:48.104422 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" event={"ID":"bb726700-5715-4a97-92c4-f8a50a0922bb","Type":"ContainerStarted","Data":"3f3cc5ca4e40ed4ef518724cfe05c52d131dd79996321aae05c8fd6e3ba9a8ee"} Dec 05 11:02:48 crc kubenswrapper[5014]: I1205 11:02:48.104538 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:02:48 crc kubenswrapper[5014]: I1205 11:02:48.127197 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" podStartSLOduration=1.998125372 podStartE2EDuration="9.1271773s" podCreationTimestamp="2025-12-05 11:02:39 +0000 UTC" firstStartedPulling="2025-12-05 11:02:40.504117376 +0000 UTC m=+887.452235080" lastFinishedPulling="2025-12-05 11:02:47.633169304 +0000 UTC m=+894.581287008" observedRunningTime="2025-12-05 11:02:48.122787503 +0000 UTC m=+895.070905227" watchObservedRunningTime="2025-12-05 11:02:48.1271773 +0000 UTC m=+895.075294994" Dec 05 11:02:48 crc kubenswrapper[5014]: I1205 11:02:48.151235 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" podStartSLOduration=2.136123753 podStartE2EDuration="9.151213727s" podCreationTimestamp="2025-12-05 11:02:39 +0000 UTC" firstStartedPulling="2025-12-05 11:02:40.625677737 +0000 UTC m=+887.573795441" lastFinishedPulling="2025-12-05 11:02:47.640767711 +0000 UTC m=+894.588885415" observedRunningTime="2025-12-05 11:02:48.1488422 +0000 UTC m=+895.096959904" watchObservedRunningTime="2025-12-05 11:02:48.151213727 +0000 UTC m=+895.099331431" Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.779282 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jzcq4"] Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.782987 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.808870 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jzcq4"] Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.837610 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-catalog-content\") pod \"community-operators-jzcq4\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.837672 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67t8t\" (UniqueName: \"kubernetes.io/projected/822a941f-e9f9-497d-beb8-4bcea6b55d3b-kube-api-access-67t8t\") pod \"community-operators-jzcq4\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.837772 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-utilities\") pod \"community-operators-jzcq4\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.939136 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-utilities\") pod \"community-operators-jzcq4\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.939454 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-catalog-content\") pod \"community-operators-jzcq4\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.939486 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67t8t\" (UniqueName: \"kubernetes.io/projected/822a941f-e9f9-497d-beb8-4bcea6b55d3b-kube-api-access-67t8t\") pod \"community-operators-jzcq4\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.939789 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-utilities\") pod \"community-operators-jzcq4\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:53 crc kubenswrapper[5014]: I1205 11:02:53.939917 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-catalog-content\") pod \"community-operators-jzcq4\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:54 crc kubenswrapper[5014]: I1205 11:02:54.003133 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-67t8t\" (UniqueName: \"kubernetes.io/projected/822a941f-e9f9-497d-beb8-4bcea6b55d3b-kube-api-access-67t8t\") pod \"community-operators-jzcq4\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:54 crc kubenswrapper[5014]: I1205 11:02:54.125022 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:02:54 crc kubenswrapper[5014]: I1205 11:02:54.472344 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jzcq4"] Dec 05 11:02:54 crc kubenswrapper[5014]: W1205 11:02:54.479116 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod822a941f_e9f9_497d_beb8_4bcea6b55d3b.slice/crio-b0e3a79950e22a933aaf07365eccb757537b1b09ef7a256a9b01ccce5c273d6b WatchSource:0}: Error finding container b0e3a79950e22a933aaf07365eccb757537b1b09ef7a256a9b01ccce5c273d6b: Status 404 returned error can't find the container with id b0e3a79950e22a933aaf07365eccb757537b1b09ef7a256a9b01ccce5c273d6b Dec 05 11:02:55 crc kubenswrapper[5014]: I1205 11:02:55.159776 5014 generic.go:334] "Generic (PLEG): container finished" podID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerID="0ce2893aff90e27bdeed0dc42109f602ddec6046cd5bf4f7dda99b1741a63805" exitCode=0 Dec 05 11:02:55 crc kubenswrapper[5014]: I1205 11:02:55.160183 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jzcq4" event={"ID":"822a941f-e9f9-497d-beb8-4bcea6b55d3b","Type":"ContainerDied","Data":"0ce2893aff90e27bdeed0dc42109f602ddec6046cd5bf4f7dda99b1741a63805"} Dec 05 11:02:55 crc kubenswrapper[5014]: I1205 11:02:55.160212 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jzcq4" event={"ID":"822a941f-e9f9-497d-beb8-4bcea6b55d3b","Type":"ContainerStarted","Data":"b0e3a79950e22a933aaf07365eccb757537b1b09ef7a256a9b01ccce5c273d6b"} Dec 05 11:02:56 crc kubenswrapper[5014]: I1205 11:02:56.174508 5014 generic.go:334] "Generic (PLEG): container finished" podID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerID="bf0f939a588787ebc9e104164e317f90f222adb24d92647097a97fce99320e63" exitCode=0 Dec 05 11:02:56 crc kubenswrapper[5014]: I1205 11:02:56.174639 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jzcq4" event={"ID":"822a941f-e9f9-497d-beb8-4bcea6b55d3b","Type":"ContainerDied","Data":"bf0f939a588787ebc9e104164e317f90f222adb24d92647097a97fce99320e63"} Dec 05 11:02:57 crc kubenswrapper[5014]: I1205 11:02:57.183062 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jzcq4" event={"ID":"822a941f-e9f9-497d-beb8-4bcea6b55d3b","Type":"ContainerStarted","Data":"a952a43e71b202ad6f8130d209f75d860d717439ae68519e04a9b45ade418573"} Dec 05 11:02:57 crc kubenswrapper[5014]: I1205 11:02:57.218396 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jzcq4" podStartSLOduration=2.782131524 podStartE2EDuration="4.21837825s" podCreationTimestamp="2025-12-05 11:02:53 +0000 UTC" firstStartedPulling="2025-12-05 11:02:55.162085257 +0000 UTC m=+902.110202951" lastFinishedPulling="2025-12-05 11:02:56.598331973 +0000 UTC m=+903.546449677" observedRunningTime="2025-12-05 11:02:57.212903686 +0000 UTC 
m=+904.161021410" watchObservedRunningTime="2025-12-05 11:02:57.21837825 +0000 UTC m=+904.166495944" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.177713 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lrjnq"] Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.179723 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.190848 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lrjnq"] Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.318300 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-utilities\") pod \"redhat-marketplace-lrjnq\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.318362 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-catalog-content\") pod \"redhat-marketplace-lrjnq\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.318422 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkvtd\" (UniqueName: \"kubernetes.io/projected/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-kube-api-access-zkvtd\") pod \"redhat-marketplace-lrjnq\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.419708 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-utilities\") pod \"redhat-marketplace-lrjnq\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.419782 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-catalog-content\") pod \"redhat-marketplace-lrjnq\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.419853 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkvtd\" (UniqueName: \"kubernetes.io/projected/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-kube-api-access-zkvtd\") pod \"redhat-marketplace-lrjnq\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.420973 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-catalog-content\") pod \"redhat-marketplace-lrjnq\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.421005 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-utilities\") pod \"redhat-marketplace-lrjnq\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.454633 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkvtd\" (UniqueName: \"kubernetes.io/projected/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-kube-api-access-zkvtd\") pod \"redhat-marketplace-lrjnq\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.499722 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:02:59 crc kubenswrapper[5014]: I1205 11:02:59.742732 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lrjnq"] Dec 05 11:03:00 crc kubenswrapper[5014]: I1205 11:03:00.140264 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5f44fbc487-54m6r" Dec 05 11:03:00 crc kubenswrapper[5014]: I1205 11:03:00.204908 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrjnq" event={"ID":"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf","Type":"ContainerStarted","Data":"7b787312f38a9661f8ae54c90c1ac25c00f23eada5f58d0afdcd842c7bc0dafb"} Dec 05 11:03:00 crc kubenswrapper[5014]: I1205 11:03:00.204965 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrjnq" event={"ID":"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf","Type":"ContainerStarted","Data":"653f6ceeba93724c1b566355f55cce7a854b6675d2169101f0df9cd3a87bf179"} Dec 05 11:03:01 crc kubenswrapper[5014]: I1205 11:03:01.211866 5014 generic.go:334] "Generic (PLEG): container finished" podID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerID="7b787312f38a9661f8ae54c90c1ac25c00f23eada5f58d0afdcd842c7bc0dafb" exitCode=0 Dec 05 11:03:01 crc kubenswrapper[5014]: I1205 11:03:01.211909 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrjnq" event={"ID":"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf","Type":"ContainerDied","Data":"7b787312f38a9661f8ae54c90c1ac25c00f23eada5f58d0afdcd842c7bc0dafb"} Dec 05 11:03:03 crc kubenswrapper[5014]: I1205 11:03:03.227174 5014 generic.go:334] "Generic (PLEG): container finished" podID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerID="c1864f5dfcfe22d5dc6f332377162c36906ef3c0982c0c2805265164964d8b8c" exitCode=0 Dec 05 11:03:03 crc kubenswrapper[5014]: I1205 11:03:03.227305 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrjnq" event={"ID":"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf","Type":"ContainerDied","Data":"c1864f5dfcfe22d5dc6f332377162c36906ef3c0982c0c2805265164964d8b8c"} Dec 05 11:03:04 crc kubenswrapper[5014]: I1205 11:03:04.125494 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:03:04 crc kubenswrapper[5014]: I1205 11:03:04.125852 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:03:04 crc kubenswrapper[5014]: I1205 11:03:04.181455 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:03:04 crc kubenswrapper[5014]: I1205 11:03:04.235923 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrjnq" event={"ID":"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf","Type":"ContainerStarted","Data":"b66e65f62caa9fbf1bf178ab0e9777f4615aeb025c7489fbf2557c2cb411d27c"} Dec 05 11:03:04 crc kubenswrapper[5014]: I1205 11:03:04.261005 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lrjnq" podStartSLOduration=2.745066788 podStartE2EDuration="5.260986248s" podCreationTimestamp="2025-12-05 11:02:59 +0000 UTC" firstStartedPulling="2025-12-05 11:03:01.213445463 +0000 UTC m=+908.161563177" lastFinishedPulling="2025-12-05 11:03:03.729364933 +0000 UTC m=+910.677482637" observedRunningTime="2025-12-05 11:03:04.257729919 +0000 UTC m=+911.205847633" watchObservedRunningTime="2025-12-05 11:03:04.260986248 +0000 UTC m=+911.209103962" Dec 05 11:03:04 crc kubenswrapper[5014]: I1205 11:03:04.294292 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:03:07 crc kubenswrapper[5014]: I1205 11:03:07.771306 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jzcq4"] Dec 05 11:03:07 crc kubenswrapper[5014]: I1205 11:03:07.771892 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jzcq4" podUID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerName="registry-server" containerID="cri-o://a952a43e71b202ad6f8130d209f75d860d717439ae68519e04a9b45ade418573" gracePeriod=2 Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.280967 5014 generic.go:334] "Generic (PLEG): container finished" podID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerID="a952a43e71b202ad6f8130d209f75d860d717439ae68519e04a9b45ade418573" exitCode=0 Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.281259 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jzcq4" event={"ID":"822a941f-e9f9-497d-beb8-4bcea6b55d3b","Type":"ContainerDied","Data":"a952a43e71b202ad6f8130d209f75d860d717439ae68519e04a9b45ade418573"} Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.367616 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.500806 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.500888 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.539993 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.548956 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-utilities\") pod \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.549085 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-catalog-content\") pod \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.549123 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67t8t\" (UniqueName: \"kubernetes.io/projected/822a941f-e9f9-497d-beb8-4bcea6b55d3b-kube-api-access-67t8t\") pod \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\" (UID: \"822a941f-e9f9-497d-beb8-4bcea6b55d3b\") " Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.550033 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-utilities" (OuterVolumeSpecName: "utilities") pod "822a941f-e9f9-497d-beb8-4bcea6b55d3b" (UID: "822a941f-e9f9-497d-beb8-4bcea6b55d3b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.558767 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/822a941f-e9f9-497d-beb8-4bcea6b55d3b-kube-api-access-67t8t" (OuterVolumeSpecName: "kube-api-access-67t8t") pod "822a941f-e9f9-497d-beb8-4bcea6b55d3b" (UID: "822a941f-e9f9-497d-beb8-4bcea6b55d3b"). InnerVolumeSpecName "kube-api-access-67t8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.596576 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "822a941f-e9f9-497d-beb8-4bcea6b55d3b" (UID: "822a941f-e9f9-497d-beb8-4bcea6b55d3b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.650651 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.650688 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67t8t\" (UniqueName: \"kubernetes.io/projected/822a941f-e9f9-497d-beb8-4bcea6b55d3b-kube-api-access-67t8t\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:09 crc kubenswrapper[5014]: I1205 11:03:09.650703 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822a941f-e9f9-497d-beb8-4bcea6b55d3b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:10 crc kubenswrapper[5014]: I1205 11:03:10.290710 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jzcq4" Dec 05 11:03:10 crc kubenswrapper[5014]: I1205 11:03:10.290729 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jzcq4" event={"ID":"822a941f-e9f9-497d-beb8-4bcea6b55d3b","Type":"ContainerDied","Data":"b0e3a79950e22a933aaf07365eccb757537b1b09ef7a256a9b01ccce5c273d6b"} Dec 05 11:03:10 crc kubenswrapper[5014]: I1205 11:03:10.292051 5014 scope.go:117] "RemoveContainer" containerID="a952a43e71b202ad6f8130d209f75d860d717439ae68519e04a9b45ade418573" Dec 05 11:03:10 crc kubenswrapper[5014]: I1205 11:03:10.325319 5014 scope.go:117] "RemoveContainer" containerID="bf0f939a588787ebc9e104164e317f90f222adb24d92647097a97fce99320e63" Dec 05 11:03:10 crc kubenswrapper[5014]: I1205 11:03:10.327956 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:03:10 crc kubenswrapper[5014]: I1205 11:03:10.350637 5014 scope.go:117] "RemoveContainer" containerID="0ce2893aff90e27bdeed0dc42109f602ddec6046cd5bf4f7dda99b1741a63805" Dec 05 11:03:10 crc kubenswrapper[5014]: I1205 11:03:10.355066 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jzcq4"] Dec 05 11:03:10 crc kubenswrapper[5014]: I1205 11:03:10.361217 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jzcq4"] Dec 05 11:03:11 crc kubenswrapper[5014]: I1205 11:03:11.325832 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" path="/var/lib/kubelet/pods/822a941f-e9f9-497d-beb8-4bcea6b55d3b/volumes" Dec 05 11:03:13 crc kubenswrapper[5014]: I1205 11:03:13.566475 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lrjnq"] Dec 05 11:03:13 crc kubenswrapper[5014]: I1205 11:03:13.567490 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lrjnq" podUID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerName="registry-server" containerID="cri-o://b66e65f62caa9fbf1bf178ab0e9777f4615aeb025c7489fbf2557c2cb411d27c" gracePeriod=2 Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.330428 5014 generic.go:334] "Generic (PLEG): container finished" podID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerID="b66e65f62caa9fbf1bf178ab0e9777f4615aeb025c7489fbf2557c2cb411d27c" exitCode=0 Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 
11:03:14.330530 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrjnq" event={"ID":"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf","Type":"ContainerDied","Data":"b66e65f62caa9fbf1bf178ab0e9777f4615aeb025c7489fbf2557c2cb411d27c"} Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.445894 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.614767 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-catalog-content\") pod \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.614868 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvtd\" (UniqueName: \"kubernetes.io/projected/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-kube-api-access-zkvtd\") pod \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.614947 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-utilities\") pod \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\" (UID: \"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf\") " Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.616004 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-utilities" (OuterVolumeSpecName: "utilities") pod "d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" (UID: "d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.616243 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.631612 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-kube-api-access-zkvtd" (OuterVolumeSpecName: "kube-api-access-zkvtd") pod "d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" (UID: "d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf"). InnerVolumeSpecName "kube-api-access-zkvtd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.636348 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" (UID: "d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.717777 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:14 crc kubenswrapper[5014]: I1205 11:03:14.717817 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvtd\" (UniqueName: \"kubernetes.io/projected/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf-kube-api-access-zkvtd\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:15 crc kubenswrapper[5014]: I1205 11:03:15.338613 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrjnq" event={"ID":"d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf","Type":"ContainerDied","Data":"653f6ceeba93724c1b566355f55cce7a854b6675d2169101f0df9cd3a87bf179"} Dec 05 11:03:15 crc kubenswrapper[5014]: I1205 11:03:15.338974 5014 scope.go:117] "RemoveContainer" containerID="b66e65f62caa9fbf1bf178ab0e9777f4615aeb025c7489fbf2557c2cb411d27c" Dec 05 11:03:15 crc kubenswrapper[5014]: I1205 11:03:15.338705 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lrjnq" Dec 05 11:03:15 crc kubenswrapper[5014]: I1205 11:03:15.355955 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lrjnq"] Dec 05 11:03:15 crc kubenswrapper[5014]: I1205 11:03:15.359780 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lrjnq"] Dec 05 11:03:15 crc kubenswrapper[5014]: I1205 11:03:15.362612 5014 scope.go:117] "RemoveContainer" containerID="c1864f5dfcfe22d5dc6f332377162c36906ef3c0982c0c2805265164964d8b8c" Dec 05 11:03:15 crc kubenswrapper[5014]: I1205 11:03:15.376495 5014 scope.go:117] "RemoveContainer" containerID="7b787312f38a9661f8ae54c90c1ac25c00f23eada5f58d0afdcd842c7bc0dafb" Dec 05 11:03:17 crc kubenswrapper[5014]: I1205 11:03:17.325071 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" path="/var/lib/kubelet/pods/d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf/volumes" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.014889 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-54df4ff95d-j68c8" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.787889 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-mwbcz"] Dec 05 11:03:20 crc kubenswrapper[5014]: E1205 11:03:20.788184 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerName="registry-server" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.788206 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerName="registry-server" Dec 05 11:03:20 crc kubenswrapper[5014]: E1205 11:03:20.788224 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerName="extract-utilities" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.788233 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerName="extract-utilities" Dec 05 11:03:20 crc kubenswrapper[5014]: E1205 11:03:20.788247 5014 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerName="extract-utilities" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.788253 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerName="extract-utilities" Dec 05 11:03:20 crc kubenswrapper[5014]: E1205 11:03:20.788281 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerName="extract-content" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.788290 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerName="extract-content" Dec 05 11:03:20 crc kubenswrapper[5014]: E1205 11:03:20.788300 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerName="registry-server" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.788307 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerName="registry-server" Dec 05 11:03:20 crc kubenswrapper[5014]: E1205 11:03:20.788325 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerName="extract-content" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.788332 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerName="extract-content" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.788469 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="822a941f-e9f9-497d-beb8-4bcea6b55d3b" containerName="registry-server" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.788488 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5d37a0d-b8e1-48ea-b5b1-b980aa70b6cf" containerName="registry-server" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.790869 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.796348 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.796654 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-2dq58" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.796902 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.801323 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84"] Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.802281 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.805345 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.807828 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84"] Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.887460 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-qr2zx"] Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.888680 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-qr2zx" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.890827 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.891365 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.892880 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.893990 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-7dx4z" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.897044 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-reloader\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.897087 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-metrics\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.897135 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-frr-startup\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.897155 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-frr-conf\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.897173 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-metrics-certs\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.897188 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b5wx\" (UniqueName: \"kubernetes.io/projected/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-kube-api-access-5b5wx\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.897207 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-frr-sockets\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.922746 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-26kt6"] Dec 05 
11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.923608 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.933702 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-26kt6"] Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.934419 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998328 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7sj8\" (UniqueName: \"kubernetes.io/projected/cd82d1e5-3ac0-4669-a192-3b8bbf071ad5-kube-api-access-h7sj8\") pod \"frr-k8s-webhook-server-7fcb986d4-8xg84\" (UID: \"cd82d1e5-3ac0-4669-a192-3b8bbf071ad5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998421 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-reloader\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998453 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-metrics\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998481 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0cc6b871-45be-4887-a73b-a2fe99989d41-metallb-excludel2\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998507 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-metrics-certs\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998533 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pph2k\" (UniqueName: \"kubernetes.io/projected/0cc6b871-45be-4887-a73b-a2fe99989d41-kube-api-access-pph2k\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998555 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-memberlist\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998582 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd82d1e5-3ac0-4669-a192-3b8bbf071ad5-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-8xg84\" (UID: \"cd82d1e5-3ac0-4669-a192-3b8bbf071ad5\") " 
pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998607 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-frr-startup\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998625 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-frr-conf\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998644 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-metrics-certs\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998666 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b5wx\" (UniqueName: \"kubernetes.io/projected/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-kube-api-access-5b5wx\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.998691 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-frr-sockets\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:20 crc kubenswrapper[5014]: I1205 11:03:20.999131 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-frr-sockets\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:20.999982 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-frr-conf\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.000188 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-frr-startup\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.000206 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-metrics\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.000402 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-reloader\") pod \"frr-k8s-mwbcz\" (UID: 
\"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.004908 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-metrics-certs\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.018851 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b5wx\" (UniqueName: \"kubernetes.io/projected/d67c4f1f-c7c8-4c51-ac84-3bf0261e4660-kube-api-access-5b5wx\") pod \"frr-k8s-mwbcz\" (UID: \"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660\") " pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.099345 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0cc6b871-45be-4887-a73b-a2fe99989d41-metallb-excludel2\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.099397 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-metrics-certs\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.099434 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pph2k\" (UniqueName: \"kubernetes.io/projected/0cc6b871-45be-4887-a73b-a2fe99989d41-kube-api-access-pph2k\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.099470 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcqkg\" (UniqueName: \"kubernetes.io/projected/6fd388fd-a96d-4997-b3b7-9fef3d7130b7-kube-api-access-tcqkg\") pod \"controller-f8648f98b-26kt6\" (UID: \"6fd388fd-a96d-4997-b3b7-9fef3d7130b7\") " pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.099493 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-memberlist\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.099523 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd82d1e5-3ac0-4669-a192-3b8bbf071ad5-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-8xg84\" (UID: \"cd82d1e5-3ac0-4669-a192-3b8bbf071ad5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.099568 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6fd388fd-a96d-4997-b3b7-9fef3d7130b7-metrics-certs\") pod \"controller-f8648f98b-26kt6\" (UID: \"6fd388fd-a96d-4997-b3b7-9fef3d7130b7\") " pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: E1205 11:03:21.099580 5014 
secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 05 11:03:21 crc kubenswrapper[5014]: E1205 11:03:21.099656 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-metrics-certs podName:0cc6b871-45be-4887-a73b-a2fe99989d41 nodeName:}" failed. No retries permitted until 2025-12-05 11:03:21.599639979 +0000 UTC m=+928.547757683 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-metrics-certs") pod "speaker-qr2zx" (UID: "0cc6b871-45be-4887-a73b-a2fe99989d41") : secret "speaker-certs-secret" not found Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.099600 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7sj8\" (UniqueName: \"kubernetes.io/projected/cd82d1e5-3ac0-4669-a192-3b8bbf071ad5-kube-api-access-h7sj8\") pod \"frr-k8s-webhook-server-7fcb986d4-8xg84\" (UID: \"cd82d1e5-3ac0-4669-a192-3b8bbf071ad5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.099746 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6fd388fd-a96d-4997-b3b7-9fef3d7130b7-cert\") pod \"controller-f8648f98b-26kt6\" (UID: \"6fd388fd-a96d-4997-b3b7-9fef3d7130b7\") " pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: E1205 11:03:21.099830 5014 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 11:03:21 crc kubenswrapper[5014]: E1205 11:03:21.099869 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-memberlist podName:0cc6b871-45be-4887-a73b-a2fe99989d41 nodeName:}" failed. No retries permitted until 2025-12-05 11:03:21.599858664 +0000 UTC m=+928.547976468 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-memberlist") pod "speaker-qr2zx" (UID: "0cc6b871-45be-4887-a73b-a2fe99989d41") : secret "metallb-memberlist" not found Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.100085 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0cc6b871-45be-4887-a73b-a2fe99989d41-metallb-excludel2\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.104237 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd82d1e5-3ac0-4669-a192-3b8bbf071ad5-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-8xg84\" (UID: \"cd82d1e5-3ac0-4669-a192-3b8bbf071ad5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.110912 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.125996 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7sj8\" (UniqueName: \"kubernetes.io/projected/cd82d1e5-3ac0-4669-a192-3b8bbf071ad5-kube-api-access-h7sj8\") pod \"frr-k8s-webhook-server-7fcb986d4-8xg84\" (UID: \"cd82d1e5-3ac0-4669-a192-3b8bbf071ad5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.127232 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pph2k\" (UniqueName: \"kubernetes.io/projected/0cc6b871-45be-4887-a73b-a2fe99989d41-kube-api-access-pph2k\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.200951 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6fd388fd-a96d-4997-b3b7-9fef3d7130b7-metrics-certs\") pod \"controller-f8648f98b-26kt6\" (UID: \"6fd388fd-a96d-4997-b3b7-9fef3d7130b7\") " pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.201023 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6fd388fd-a96d-4997-b3b7-9fef3d7130b7-cert\") pod \"controller-f8648f98b-26kt6\" (UID: \"6fd388fd-a96d-4997-b3b7-9fef3d7130b7\") " pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.201094 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcqkg\" (UniqueName: \"kubernetes.io/projected/6fd388fd-a96d-4997-b3b7-9fef3d7130b7-kube-api-access-tcqkg\") pod \"controller-f8648f98b-26kt6\" (UID: \"6fd388fd-a96d-4997-b3b7-9fef3d7130b7\") " pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.205480 5014 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.205984 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6fd388fd-a96d-4997-b3b7-9fef3d7130b7-metrics-certs\") pod \"controller-f8648f98b-26kt6\" (UID: \"6fd388fd-a96d-4997-b3b7-9fef3d7130b7\") " pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.217685 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6fd388fd-a96d-4997-b3b7-9fef3d7130b7-cert\") pod \"controller-f8648f98b-26kt6\" (UID: \"6fd388fd-a96d-4997-b3b7-9fef3d7130b7\") " pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.232048 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcqkg\" (UniqueName: \"kubernetes.io/projected/6fd388fd-a96d-4997-b3b7-9fef3d7130b7-kube-api-access-tcqkg\") pod \"controller-f8648f98b-26kt6\" (UID: \"6fd388fd-a96d-4997-b3b7-9fef3d7130b7\") " pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.251521 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.375306 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerStarted","Data":"5c784dc51bf3b28d3c9cfcd26996fab7ac4146641da624e5ac88ff1a98f1b417"} Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.420080 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.486395 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-26kt6"] Dec 05 11:03:21 crc kubenswrapper[5014]: W1205 11:03:21.497464 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fd388fd_a96d_4997_b3b7_9fef3d7130b7.slice/crio-c6093b34bfd8750268df2c0eb7821f239976744effefa221b8fe1b8c1aeef2a3 WatchSource:0}: Error finding container c6093b34bfd8750268df2c0eb7821f239976744effefa221b8fe1b8c1aeef2a3: Status 404 returned error can't find the container with id c6093b34bfd8750268df2c0eb7821f239976744effefa221b8fe1b8c1aeef2a3 Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.609880 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-metrics-certs\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.609943 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-memberlist\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:21 crc kubenswrapper[5014]: E1205 11:03:21.610083 5014 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 11:03:21 crc kubenswrapper[5014]: E1205 11:03:21.610137 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-memberlist podName:0cc6b871-45be-4887-a73b-a2fe99989d41 nodeName:}" failed. No retries permitted until 2025-12-05 11:03:22.61012466 +0000 UTC m=+929.558242364 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-memberlist") pod "speaker-qr2zx" (UID: "0cc6b871-45be-4887-a73b-a2fe99989d41") : secret "metallb-memberlist" not found Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.615230 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-metrics-certs\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:21 crc kubenswrapper[5014]: I1205 11:03:21.624789 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84"] Dec 05 11:03:21 crc kubenswrapper[5014]: W1205 11:03:21.633238 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd82d1e5_3ac0_4669_a192_3b8bbf071ad5.slice/crio-e65915194201ad9c3e9d4b0974b9dcf0503ae7fbddd57987b10acbd4c6c4e050 WatchSource:0}: Error finding container e65915194201ad9c3e9d4b0974b9dcf0503ae7fbddd57987b10acbd4c6c4e050: Status 404 returned error can't find the container with id e65915194201ad9c3e9d4b0974b9dcf0503ae7fbddd57987b10acbd4c6c4e050 Dec 05 11:03:22 crc kubenswrapper[5014]: I1205 11:03:22.383255 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-26kt6" event={"ID":"6fd388fd-a96d-4997-b3b7-9fef3d7130b7","Type":"ContainerStarted","Data":"46d2a487e46648e71bd8c84ed901175905af0ebfd176c47a1352c4d3630c655d"} Dec 05 11:03:22 crc kubenswrapper[5014]: I1205 11:03:22.383342 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-26kt6" event={"ID":"6fd388fd-a96d-4997-b3b7-9fef3d7130b7","Type":"ContainerStarted","Data":"f0eb0c214c0d19f1eea7a193c070e3d209e844eaeaa9585cdc86bf7eae9c200d"} Dec 05 11:03:22 crc kubenswrapper[5014]: I1205 11:03:22.383360 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-26kt6" event={"ID":"6fd388fd-a96d-4997-b3b7-9fef3d7130b7","Type":"ContainerStarted","Data":"c6093b34bfd8750268df2c0eb7821f239976744effefa221b8fe1b8c1aeef2a3"} Dec 05 11:03:22 crc kubenswrapper[5014]: I1205 11:03:22.383430 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:22 crc kubenswrapper[5014]: I1205 11:03:22.384192 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" event={"ID":"cd82d1e5-3ac0-4669-a192-3b8bbf071ad5","Type":"ContainerStarted","Data":"e65915194201ad9c3e9d4b0974b9dcf0503ae7fbddd57987b10acbd4c6c4e050"} Dec 05 11:03:22 crc kubenswrapper[5014]: I1205 11:03:22.399538 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-26kt6" podStartSLOduration=2.399516213 podStartE2EDuration="2.399516213s" podCreationTimestamp="2025-12-05 11:03:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:03:22.397036582 +0000 UTC m=+929.345154326" watchObservedRunningTime="2025-12-05 11:03:22.399516213 +0000 UTC m=+929.347633917" Dec 05 11:03:22 crc kubenswrapper[5014]: I1205 11:03:22.623901 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: 
\"kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-memberlist\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:22 crc kubenswrapper[5014]: I1205 11:03:22.628724 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0cc6b871-45be-4887-a73b-a2fe99989d41-memberlist\") pod \"speaker-qr2zx\" (UID: \"0cc6b871-45be-4887-a73b-a2fe99989d41\") " pod="metallb-system/speaker-qr2zx" Dec 05 11:03:22 crc kubenswrapper[5014]: I1205 11:03:22.701427 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-qr2zx" Dec 05 11:03:23 crc kubenswrapper[5014]: I1205 11:03:23.413908 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-qr2zx" event={"ID":"0cc6b871-45be-4887-a73b-a2fe99989d41","Type":"ContainerStarted","Data":"915b3d72e517a20caf41632c55f2a7e597adc947ddc6afdd3a858ca0c4de85f8"} Dec 05 11:03:23 crc kubenswrapper[5014]: I1205 11:03:23.413954 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-qr2zx" event={"ID":"0cc6b871-45be-4887-a73b-a2fe99989d41","Type":"ContainerStarted","Data":"669011eff67a18e1704a8a708f60b18985eb1651f1f56a93b5f9659f9077ef46"} Dec 05 11:03:24 crc kubenswrapper[5014]: I1205 11:03:24.420092 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-qr2zx" event={"ID":"0cc6b871-45be-4887-a73b-a2fe99989d41","Type":"ContainerStarted","Data":"b74501d7ca1623959c9cf71b32ad49e06cccb1d0f71f9b7f88d1027e10bb825c"} Dec 05 11:03:24 crc kubenswrapper[5014]: I1205 11:03:24.420433 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-qr2zx" Dec 05 11:03:24 crc kubenswrapper[5014]: I1205 11:03:24.440014 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-qr2zx" podStartSLOduration=4.439996422 podStartE2EDuration="4.439996422s" podCreationTimestamp="2025-12-05 11:03:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:03:24.43704632 +0000 UTC m=+931.385164034" watchObservedRunningTime="2025-12-05 11:03:24.439996422 +0000 UTC m=+931.388114126" Dec 05 11:03:29 crc kubenswrapper[5014]: I1205 11:03:29.457419 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" event={"ID":"cd82d1e5-3ac0-4669-a192-3b8bbf071ad5","Type":"ContainerStarted","Data":"42a0ad33cd9a733f374349dcabb72613e0ee5fd5e3747f4341a83e72561f4e18"} Dec 05 11:03:29 crc kubenswrapper[5014]: I1205 11:03:29.458034 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:29 crc kubenswrapper[5014]: I1205 11:03:29.459342 5014 generic.go:334] "Generic (PLEG): container finished" podID="d67c4f1f-c7c8-4c51-ac84-3bf0261e4660" containerID="b3b02a1d33ddcd96ac56f956e3215cf132fc2752647dd86fce5f9ba7350aa75a" exitCode=0 Dec 05 11:03:29 crc kubenswrapper[5014]: I1205 11:03:29.459391 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerDied","Data":"b3b02a1d33ddcd96ac56f956e3215cf132fc2752647dd86fce5f9ba7350aa75a"} Dec 05 11:03:29 crc kubenswrapper[5014]: I1205 11:03:29.482541 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" podStartSLOduration=2.352911509 podStartE2EDuration="9.482523155s" podCreationTimestamp="2025-12-05 11:03:20 +0000 UTC" firstStartedPulling="2025-12-05 11:03:21.635088238 +0000 UTC m=+928.583205942" lastFinishedPulling="2025-12-05 11:03:28.764699884 +0000 UTC m=+935.712817588" observedRunningTime="2025-12-05 11:03:29.479483061 +0000 UTC m=+936.427600765" watchObservedRunningTime="2025-12-05 11:03:29.482523155 +0000 UTC m=+936.430640869" Dec 05 11:03:30 crc kubenswrapper[5014]: I1205 11:03:30.467416 5014 generic.go:334] "Generic (PLEG): container finished" podID="d67c4f1f-c7c8-4c51-ac84-3bf0261e4660" containerID="7404fc25513a2192a0dc45ebf0bab60a2fecc40af77f60f23cf01e7d5b3f08bb" exitCode=0 Dec 05 11:03:30 crc kubenswrapper[5014]: I1205 11:03:30.467478 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerDied","Data":"7404fc25513a2192a0dc45ebf0bab60a2fecc40af77f60f23cf01e7d5b3f08bb"} Dec 05 11:03:31 crc kubenswrapper[5014]: I1205 11:03:31.267107 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-26kt6" Dec 05 11:03:31 crc kubenswrapper[5014]: I1205 11:03:31.474331 5014 generic.go:334] "Generic (PLEG): container finished" podID="d67c4f1f-c7c8-4c51-ac84-3bf0261e4660" containerID="529aebdbc184927d7eec4807bfaace618fc6a1ea7d6211e389858c2db021d06d" exitCode=0 Dec 05 11:03:31 crc kubenswrapper[5014]: I1205 11:03:31.474382 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerDied","Data":"529aebdbc184927d7eec4807bfaace618fc6a1ea7d6211e389858c2db021d06d"} Dec 05 11:03:32 crc kubenswrapper[5014]: I1205 11:03:32.488233 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerStarted","Data":"945cce71a60740db3f973aa43aa6baf2aea9fd3690ea9f222ef547d4da5f3e93"} Dec 05 11:03:32 crc kubenswrapper[5014]: I1205 11:03:32.488651 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerStarted","Data":"8c31761caa854e7e33c83fe9afabcbe301a26254513aff4874c6ab08ef450db6"} Dec 05 11:03:32 crc kubenswrapper[5014]: I1205 11:03:32.488897 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerStarted","Data":"6b7746624f440b2716aa944d7fea1901c2c1ecd58aa1840a6f5d7e7e18917dbf"} Dec 05 11:03:32 crc kubenswrapper[5014]: I1205 11:03:32.488909 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerStarted","Data":"84812175059cf1fbd7391d5edba35b940f47d33395c79587199ee51087834ca3"} Dec 05 11:03:32 crc kubenswrapper[5014]: I1205 11:03:32.488918 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerStarted","Data":"f54fe9266a7a3354c7c603833a813e8cc0f4eeb681a3f06fc941aa9e7b678fcd"} Dec 05 11:03:32 crc kubenswrapper[5014]: I1205 11:03:32.488926 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mwbcz" 
event={"ID":"d67c4f1f-c7c8-4c51-ac84-3bf0261e4660","Type":"ContainerStarted","Data":"85cb1999add026f15db15abba88283e920f3bce8f8e7dfb97672a7efcf737789"} Dec 05 11:03:32 crc kubenswrapper[5014]: I1205 11:03:32.488938 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:32 crc kubenswrapper[5014]: I1205 11:03:32.514407 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-mwbcz" podStartSLOduration=5.069514342 podStartE2EDuration="12.514391915s" podCreationTimestamp="2025-12-05 11:03:20 +0000 UTC" firstStartedPulling="2025-12-05 11:03:21.301585847 +0000 UTC m=+928.249703551" lastFinishedPulling="2025-12-05 11:03:28.74646342 +0000 UTC m=+935.694581124" observedRunningTime="2025-12-05 11:03:32.511020833 +0000 UTC m=+939.459138557" watchObservedRunningTime="2025-12-05 11:03:32.514391915 +0000 UTC m=+939.462509619" Dec 05 11:03:32 crc kubenswrapper[5014]: I1205 11:03:32.707370 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-qr2zx" Dec 05 11:03:36 crc kubenswrapper[5014]: I1205 11:03:36.112624 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:36 crc kubenswrapper[5014]: I1205 11:03:36.177709 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.176802 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-snr6z"] Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.178142 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-snr6z" Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.180166 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.183647 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-2gzjk" Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.191618 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-snr6z"] Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.195487 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.353865 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84fx7\" (UniqueName: \"kubernetes.io/projected/f1566577-7102-49a6-a5b8-d27f4b03e350-kube-api-access-84fx7\") pod \"openstack-operator-index-snr6z\" (UID: \"f1566577-7102-49a6-a5b8-d27f4b03e350\") " pod="openstack-operators/openstack-operator-index-snr6z" Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.455167 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84fx7\" (UniqueName: \"kubernetes.io/projected/f1566577-7102-49a6-a5b8-d27f4b03e350-kube-api-access-84fx7\") pod \"openstack-operator-index-snr6z\" (UID: \"f1566577-7102-49a6-a5b8-d27f4b03e350\") " pod="openstack-operators/openstack-operator-index-snr6z" Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.476969 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-84fx7\" (UniqueName: \"kubernetes.io/projected/f1566577-7102-49a6-a5b8-d27f4b03e350-kube-api-access-84fx7\") pod \"openstack-operator-index-snr6z\" (UID: \"f1566577-7102-49a6-a5b8-d27f4b03e350\") " pod="openstack-operators/openstack-operator-index-snr6z" Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.503845 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-snr6z" Dec 05 11:03:39 crc kubenswrapper[5014]: I1205 11:03:39.931489 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-snr6z"] Dec 05 11:03:40 crc kubenswrapper[5014]: I1205 11:03:40.544440 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-snr6z" event={"ID":"f1566577-7102-49a6-a5b8-d27f4b03e350","Type":"ContainerStarted","Data":"b3865586f7d805de831f356606b0a01b5db254e23a65f9ba0d076c2bd3c2fe22"} Dec 05 11:03:41 crc kubenswrapper[5014]: I1205 11:03:41.114256 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-mwbcz" Dec 05 11:03:41 crc kubenswrapper[5014]: I1205 11:03:41.423871 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8xg84" Dec 05 11:03:43 crc kubenswrapper[5014]: I1205 11:03:43.562836 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-snr6z" event={"ID":"f1566577-7102-49a6-a5b8-d27f4b03e350","Type":"ContainerStarted","Data":"fb0568ab3ca18d15762e4790ee86e3411d7cbae7c2a1f10239a80d4776bd2f72"} Dec 05 11:03:43 crc kubenswrapper[5014]: I1205 11:03:43.580358 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-snr6z" podStartSLOduration=1.155928307 podStartE2EDuration="4.580335576s" podCreationTimestamp="2025-12-05 11:03:39 +0000 UTC" firstStartedPulling="2025-12-05 11:03:39.951436187 +0000 UTC m=+946.899553891" lastFinishedPulling="2025-12-05 11:03:43.375843456 +0000 UTC m=+950.323961160" observedRunningTime="2025-12-05 11:03:43.575140199 +0000 UTC m=+950.523257913" watchObservedRunningTime="2025-12-05 11:03:43.580335576 +0000 UTC m=+950.528453280" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.375064 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2dn4t"] Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.376930 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.388217 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dn4t"] Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.433351 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm97q\" (UniqueName: \"kubernetes.io/projected/cf4056da-d26f-4b36-b598-50e73d849eed-kube-api-access-rm97q\") pod \"certified-operators-2dn4t\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.433715 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-utilities\") pod \"certified-operators-2dn4t\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.433796 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-catalog-content\") pod \"certified-operators-2dn4t\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.535128 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-utilities\") pod \"certified-operators-2dn4t\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.535225 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-catalog-content\") pod \"certified-operators-2dn4t\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.535354 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm97q\" (UniqueName: \"kubernetes.io/projected/cf4056da-d26f-4b36-b598-50e73d849eed-kube-api-access-rm97q\") pod \"certified-operators-2dn4t\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.535714 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-utilities\") pod \"certified-operators-2dn4t\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.535729 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-catalog-content\") pod \"certified-operators-2dn4t\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.554317 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rm97q\" (UniqueName: \"kubernetes.io/projected/cf4056da-d26f-4b36-b598-50e73d849eed-kube-api-access-rm97q\") pod \"certified-operators-2dn4t\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:44 crc kubenswrapper[5014]: I1205 11:03:44.734219 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:45 crc kubenswrapper[5014]: I1205 11:03:45.210061 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dn4t"] Dec 05 11:03:45 crc kubenswrapper[5014]: I1205 11:03:45.576557 5014 generic.go:334] "Generic (PLEG): container finished" podID="cf4056da-d26f-4b36-b598-50e73d849eed" containerID="c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef" exitCode=0 Dec 05 11:03:45 crc kubenswrapper[5014]: I1205 11:03:45.576636 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dn4t" event={"ID":"cf4056da-d26f-4b36-b598-50e73d849eed","Type":"ContainerDied","Data":"c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef"} Dec 05 11:03:45 crc kubenswrapper[5014]: I1205 11:03:45.577059 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dn4t" event={"ID":"cf4056da-d26f-4b36-b598-50e73d849eed","Type":"ContainerStarted","Data":"a3b04fefb024ca9be251d89042166b7ea78e81217e4e956ddb576e349767fec5"} Dec 05 11:03:46 crc kubenswrapper[5014]: I1205 11:03:46.583532 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dn4t" event={"ID":"cf4056da-d26f-4b36-b598-50e73d849eed","Type":"ContainerStarted","Data":"330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7"} Dec 05 11:03:47 crc kubenswrapper[5014]: I1205 11:03:47.592048 5014 generic.go:334] "Generic (PLEG): container finished" podID="cf4056da-d26f-4b36-b598-50e73d849eed" containerID="330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7" exitCode=0 Dec 05 11:03:47 crc kubenswrapper[5014]: I1205 11:03:47.592118 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dn4t" event={"ID":"cf4056da-d26f-4b36-b598-50e73d849eed","Type":"ContainerDied","Data":"330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7"} Dec 05 11:03:48 crc kubenswrapper[5014]: I1205 11:03:48.599889 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dn4t" event={"ID":"cf4056da-d26f-4b36-b598-50e73d849eed","Type":"ContainerStarted","Data":"f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af"} Dec 05 11:03:48 crc kubenswrapper[5014]: I1205 11:03:48.619444 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2dn4t" podStartSLOduration=1.864068707 podStartE2EDuration="4.619425034s" podCreationTimestamp="2025-12-05 11:03:44 +0000 UTC" firstStartedPulling="2025-12-05 11:03:45.578443092 +0000 UTC m=+952.526560796" lastFinishedPulling="2025-12-05 11:03:48.333799419 +0000 UTC m=+955.281917123" observedRunningTime="2025-12-05 11:03:48.614828272 +0000 UTC m=+955.562945996" watchObservedRunningTime="2025-12-05 11:03:48.619425034 +0000 UTC m=+955.567542738" Dec 05 11:03:49 crc kubenswrapper[5014]: I1205 11:03:49.504088 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack-operators/openstack-operator-index-snr6z" Dec 05 11:03:49 crc kubenswrapper[5014]: I1205 11:03:49.504175 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-snr6z" Dec 05 11:03:49 crc kubenswrapper[5014]: I1205 11:03:49.533603 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-snr6z" Dec 05 11:03:49 crc kubenswrapper[5014]: I1205 11:03:49.630191 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-snr6z" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.220124 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j"] Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.222410 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.230566 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j"] Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.231477 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-d7927" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.254823 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-util\") pod \"7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.254891 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-bundle\") pod \"7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.255034 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spghh\" (UniqueName: \"kubernetes.io/projected/70125d36-6327-44d7-b08e-485e002d024e-kube-api-access-spghh\") pod \"7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.357238 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-util\") pod \"7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.357346 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-bundle\") pod \"7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.357393 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spghh\" (UniqueName: \"kubernetes.io/projected/70125d36-6327-44d7-b08e-485e002d024e-kube-api-access-spghh\") pod \"7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.357994 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-util\") pod \"7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.358073 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-bundle\") pod \"7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.396013 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spghh\" (UniqueName: \"kubernetes.io/projected/70125d36-6327-44d7-b08e-485e002d024e-kube-api-access-spghh\") pod \"7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.544112 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-d7927" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.551317 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:53 crc kubenswrapper[5014]: I1205 11:03:53.802522 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j"] Dec 05 11:03:54 crc kubenswrapper[5014]: I1205 11:03:54.642999 5014 generic.go:334] "Generic (PLEG): container finished" podID="70125d36-6327-44d7-b08e-485e002d024e" containerID="0a682a7131f3d13435b7bd7146421db89d63bd9c2b17488474c65e28626aaed0" exitCode=0 Dec 05 11:03:54 crc kubenswrapper[5014]: I1205 11:03:54.643090 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" event={"ID":"70125d36-6327-44d7-b08e-485e002d024e","Type":"ContainerDied","Data":"0a682a7131f3d13435b7bd7146421db89d63bd9c2b17488474c65e28626aaed0"} Dec 05 11:03:54 crc kubenswrapper[5014]: I1205 11:03:54.643562 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" event={"ID":"70125d36-6327-44d7-b08e-485e002d024e","Type":"ContainerStarted","Data":"c1cecd0fb570a475cbd4c3d86a593f71e3f38bcfe59f76bc2b719b05032f939c"} Dec 05 11:03:54 crc kubenswrapper[5014]: I1205 11:03:54.734389 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:54 crc kubenswrapper[5014]: I1205 11:03:54.734671 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:54 crc kubenswrapper[5014]: I1205 11:03:54.794182 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:55 crc kubenswrapper[5014]: I1205 11:03:55.649674 5014 generic.go:334] "Generic (PLEG): container finished" podID="70125d36-6327-44d7-b08e-485e002d024e" containerID="fe68d0bc6d76156f932ff12bfc8115cdf7165934ee32e6d7f7d60e844ff113d1" exitCode=0 Dec 05 11:03:55 crc kubenswrapper[5014]: I1205 11:03:55.649765 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" event={"ID":"70125d36-6327-44d7-b08e-485e002d024e","Type":"ContainerDied","Data":"fe68d0bc6d76156f932ff12bfc8115cdf7165934ee32e6d7f7d60e844ff113d1"} Dec 05 11:03:55 crc kubenswrapper[5014]: I1205 11:03:55.742124 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:56 crc kubenswrapper[5014]: I1205 11:03:56.660010 5014 generic.go:334] "Generic (PLEG): container finished" podID="70125d36-6327-44d7-b08e-485e002d024e" containerID="d05f2915b6907215ab54364df4ee6daaf84bc531c348aab86e780e3649e67fc3" exitCode=0 Dec 05 11:03:56 crc kubenswrapper[5014]: I1205 11:03:56.660083 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" event={"ID":"70125d36-6327-44d7-b08e-485e002d024e","Type":"ContainerDied","Data":"d05f2915b6907215ab54364df4ee6daaf84bc531c348aab86e780e3649e67fc3"} Dec 05 11:03:57 crc kubenswrapper[5014]: I1205 11:03:57.983892 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.021005 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-util\") pod \"70125d36-6327-44d7-b08e-485e002d024e\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.021637 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spghh\" (UniqueName: \"kubernetes.io/projected/70125d36-6327-44d7-b08e-485e002d024e-kube-api-access-spghh\") pod \"70125d36-6327-44d7-b08e-485e002d024e\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.021676 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-bundle\") pod \"70125d36-6327-44d7-b08e-485e002d024e\" (UID: \"70125d36-6327-44d7-b08e-485e002d024e\") " Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.022294 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-bundle" (OuterVolumeSpecName: "bundle") pod "70125d36-6327-44d7-b08e-485e002d024e" (UID: "70125d36-6327-44d7-b08e-485e002d024e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.027021 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70125d36-6327-44d7-b08e-485e002d024e-kube-api-access-spghh" (OuterVolumeSpecName: "kube-api-access-spghh") pod "70125d36-6327-44d7-b08e-485e002d024e" (UID: "70125d36-6327-44d7-b08e-485e002d024e"). InnerVolumeSpecName "kube-api-access-spghh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.033999 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-util" (OuterVolumeSpecName: "util") pod "70125d36-6327-44d7-b08e-485e002d024e" (UID: "70125d36-6327-44d7-b08e-485e002d024e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.123182 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spghh\" (UniqueName: \"kubernetes.io/projected/70125d36-6327-44d7-b08e-485e002d024e-kube-api-access-spghh\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.123231 5014 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.123241 5014 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/70125d36-6327-44d7-b08e-485e002d024e-util\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.573508 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2dn4t"] Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.683781 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" event={"ID":"70125d36-6327-44d7-b08e-485e002d024e","Type":"ContainerDied","Data":"c1cecd0fb570a475cbd4c3d86a593f71e3f38bcfe59f76bc2b719b05032f939c"} Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.683838 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1cecd0fb570a475cbd4c3d86a593f71e3f38bcfe59f76bc2b719b05032f939c" Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.683887 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2dn4t" podUID="cf4056da-d26f-4b36-b598-50e73d849eed" containerName="registry-server" containerID="cri-o://f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af" gracePeriod=2 Dec 05 11:03:58 crc kubenswrapper[5014]: I1205 11:03:58.683914 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.545038 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.694613 5014 generic.go:334] "Generic (PLEG): container finished" podID="cf4056da-d26f-4b36-b598-50e73d849eed" containerID="f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af" exitCode=0 Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.694691 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2dn4t" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.694688 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dn4t" event={"ID":"cf4056da-d26f-4b36-b598-50e73d849eed","Type":"ContainerDied","Data":"f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af"} Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.695281 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dn4t" event={"ID":"cf4056da-d26f-4b36-b598-50e73d849eed","Type":"ContainerDied","Data":"a3b04fefb024ca9be251d89042166b7ea78e81217e4e956ddb576e349767fec5"} Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.695352 5014 scope.go:117] "RemoveContainer" containerID="f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.724512 5014 scope.go:117] "RemoveContainer" containerID="330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.741367 5014 scope.go:117] "RemoveContainer" containerID="c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.748049 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-utilities\") pod \"cf4056da-d26f-4b36-b598-50e73d849eed\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.748242 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm97q\" (UniqueName: \"kubernetes.io/projected/cf4056da-d26f-4b36-b598-50e73d849eed-kube-api-access-rm97q\") pod \"cf4056da-d26f-4b36-b598-50e73d849eed\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.748431 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-catalog-content\") pod \"cf4056da-d26f-4b36-b598-50e73d849eed\" (UID: \"cf4056da-d26f-4b36-b598-50e73d849eed\") " Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.749543 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-utilities" (OuterVolumeSpecName: "utilities") pod "cf4056da-d26f-4b36-b598-50e73d849eed" (UID: "cf4056da-d26f-4b36-b598-50e73d849eed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.749766 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.753603 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf4056da-d26f-4b36-b598-50e73d849eed-kube-api-access-rm97q" (OuterVolumeSpecName: "kube-api-access-rm97q") pod "cf4056da-d26f-4b36-b598-50e73d849eed" (UID: "cf4056da-d26f-4b36-b598-50e73d849eed"). InnerVolumeSpecName "kube-api-access-rm97q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.761539 5014 scope.go:117] "RemoveContainer" containerID="f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af" Dec 05 11:03:59 crc kubenswrapper[5014]: E1205 11:03:59.761920 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af\": container with ID starting with f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af not found: ID does not exist" containerID="f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.761960 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af"} err="failed to get container status \"f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af\": rpc error: code = NotFound desc = could not find container \"f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af\": container with ID starting with f2b7273cafb3c18ca3bc72ceb7bdeb649a3043972370a589923ed82f7e6357af not found: ID does not exist" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.761983 5014 scope.go:117] "RemoveContainer" containerID="330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7" Dec 05 11:03:59 crc kubenswrapper[5014]: E1205 11:03:59.762218 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7\": container with ID starting with 330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7 not found: ID does not exist" containerID="330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.762234 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7"} err="failed to get container status \"330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7\": rpc error: code = NotFound desc = could not find container \"330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7\": container with ID starting with 330970e735fd5ee12cae472da6396c398126b1580e91c5cbf481b7c40cf11fd7 not found: ID does not exist" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.762248 5014 scope.go:117] "RemoveContainer" containerID="c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef" Dec 05 11:03:59 crc kubenswrapper[5014]: E1205 11:03:59.763883 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef\": container with ID starting with c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef not found: ID does not exist" containerID="c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.763912 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef"} err="failed to get container status \"c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef\": rpc error: code = NotFound desc = could not 
find container \"c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef\": container with ID starting with c700969bff9031cc887a782664ba7db4da32e9a191a87779d8d17addd78920ef not found: ID does not exist" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.793466 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf4056da-d26f-4b36-b598-50e73d849eed" (UID: "cf4056da-d26f-4b36-b598-50e73d849eed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.850473 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm97q\" (UniqueName: \"kubernetes.io/projected/cf4056da-d26f-4b36-b598-50e73d849eed-kube-api-access-rm97q\") on node \"crc\" DevicePath \"\"" Dec 05 11:03:59 crc kubenswrapper[5014]: I1205 11:03:59.850508 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4056da-d26f-4b36-b598-50e73d849eed-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:04:00 crc kubenswrapper[5014]: I1205 11:04:00.035886 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2dn4t"] Dec 05 11:04:00 crc kubenswrapper[5014]: I1205 11:04:00.041504 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2dn4t"] Dec 05 11:04:01 crc kubenswrapper[5014]: I1205 11:04:01.339224 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf4056da-d26f-4b36-b598-50e73d849eed" path="/var/lib/kubelet/pods/cf4056da-d26f-4b36-b598-50e73d849eed/volumes" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.130633 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2"] Dec 05 11:04:04 crc kubenswrapper[5014]: E1205 11:04:04.131146 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70125d36-6327-44d7-b08e-485e002d024e" containerName="util" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.131159 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="70125d36-6327-44d7-b08e-485e002d024e" containerName="util" Dec 05 11:04:04 crc kubenswrapper[5014]: E1205 11:04:04.131177 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70125d36-6327-44d7-b08e-485e002d024e" containerName="pull" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.131183 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="70125d36-6327-44d7-b08e-485e002d024e" containerName="pull" Dec 05 11:04:04 crc kubenswrapper[5014]: E1205 11:04:04.131189 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70125d36-6327-44d7-b08e-485e002d024e" containerName="extract" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.131195 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="70125d36-6327-44d7-b08e-485e002d024e" containerName="extract" Dec 05 11:04:04 crc kubenswrapper[5014]: E1205 11:04:04.131210 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4056da-d26f-4b36-b598-50e73d849eed" containerName="extract-utilities" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.131215 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4056da-d26f-4b36-b598-50e73d849eed" containerName="extract-utilities" Dec 05 11:04:04 crc 
kubenswrapper[5014]: E1205 11:04:04.131222 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4056da-d26f-4b36-b598-50e73d849eed" containerName="extract-content" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.131227 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4056da-d26f-4b36-b598-50e73d849eed" containerName="extract-content" Dec 05 11:04:04 crc kubenswrapper[5014]: E1205 11:04:04.131241 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4056da-d26f-4b36-b598-50e73d849eed" containerName="registry-server" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.131246 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4056da-d26f-4b36-b598-50e73d849eed" containerName="registry-server" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.131359 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4056da-d26f-4b36-b598-50e73d849eed" containerName="registry-server" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.131370 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="70125d36-6327-44d7-b08e-485e002d024e" containerName="extract" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.131809 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.134458 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-78ww9" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.221958 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2"] Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.313006 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqp9f\" (UniqueName: \"kubernetes.io/projected/bdd8a367-e716-47ca-99d2-4b9fe9af1f6e-kube-api-access-cqp9f\") pod \"openstack-operator-controller-operator-58785945fc-gt9n2\" (UID: \"bdd8a367-e716-47ca-99d2-4b9fe9af1f6e\") " pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.414890 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqp9f\" (UniqueName: \"kubernetes.io/projected/bdd8a367-e716-47ca-99d2-4b9fe9af1f6e-kube-api-access-cqp9f\") pod \"openstack-operator-controller-operator-58785945fc-gt9n2\" (UID: \"bdd8a367-e716-47ca-99d2-4b9fe9af1f6e\") " pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.450929 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqp9f\" (UniqueName: \"kubernetes.io/projected/bdd8a367-e716-47ca-99d2-4b9fe9af1f6e-kube-api-access-cqp9f\") pod \"openstack-operator-controller-operator-58785945fc-gt9n2\" (UID: \"bdd8a367-e716-47ca-99d2-4b9fe9af1f6e\") " pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.456341 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" Dec 05 11:04:04 crc kubenswrapper[5014]: I1205 11:04:04.907937 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2"] Dec 05 11:04:05 crc kubenswrapper[5014]: I1205 11:04:05.737880 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" event={"ID":"bdd8a367-e716-47ca-99d2-4b9fe9af1f6e","Type":"ContainerStarted","Data":"290b9b8654f3280117f7a76f861b6afbd8c1801bb3158e52810810347f1454e5"} Dec 05 11:04:09 crc kubenswrapper[5014]: I1205 11:04:09.762704 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" event={"ID":"bdd8a367-e716-47ca-99d2-4b9fe9af1f6e","Type":"ContainerStarted","Data":"aa798b3a9173520df64dd7e66d52e4a34564cac7ab1ba8c125565026832f934c"} Dec 05 11:04:09 crc kubenswrapper[5014]: I1205 11:04:09.763321 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" Dec 05 11:04:09 crc kubenswrapper[5014]: I1205 11:04:09.788393 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" podStartSLOduration=1.5154607 podStartE2EDuration="5.788374465s" podCreationTimestamp="2025-12-05 11:04:04 +0000 UTC" firstStartedPulling="2025-12-05 11:04:04.922890273 +0000 UTC m=+971.871007977" lastFinishedPulling="2025-12-05 11:04:09.195804038 +0000 UTC m=+976.143921742" observedRunningTime="2025-12-05 11:04:09.786248643 +0000 UTC m=+976.734366367" watchObservedRunningTime="2025-12-05 11:04:09.788374465 +0000 UTC m=+976.736492169" Dec 05 11:04:14 crc kubenswrapper[5014]: I1205 11:04:14.458428 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-58785945fc-gt9n2" Dec 05 11:04:32 crc kubenswrapper[5014]: I1205 11:04:32.936634 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:04:32 crc kubenswrapper[5014]: I1205 11:04:32.937155 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.095907 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.097176 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.101038 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.101878 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-rdk5v" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.101989 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.103576 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-wzjv6" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.112145 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.117463 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.159533 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.159987 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8j65\" (UniqueName: \"kubernetes.io/projected/26989151-2ab4-4ae1-9d53-f9c038fba7e1-kube-api-access-p8j65\") pod \"cinder-operator-controller-manager-859b6ccc6-dtmzt\" (UID: \"26989151-2ab4-4ae1-9d53-f9c038fba7e1\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.160033 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjj4x\" (UniqueName: \"kubernetes.io/projected/fce514d3-328b-4d3f-b863-8fbb70bac467-kube-api-access-fjj4x\") pod \"barbican-operator-controller-manager-7d9dfd778-txc7h\" (UID: \"fce514d3-328b-4d3f-b863-8fbb70bac467\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.160766 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.173810 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-ms5zb" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.182904 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.186809 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.197783 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-7kkct" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.231348 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.237349 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.259330 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.260625 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.263305 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd67m\" (UniqueName: \"kubernetes.io/projected/90daaa58-8638-46b7-9492-27f70cc124a8-kube-api-access-gd67m\") pod \"designate-operator-controller-manager-78b4bc895b-pdzgg\" (UID: \"90daaa58-8638-46b7-9492-27f70cc124a8\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.263346 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zszwh\" (UniqueName: \"kubernetes.io/projected/4e5afc7a-459a-4a76-bf92-fd47a823833e-kube-api-access-zszwh\") pod \"glance-operator-controller-manager-77987cd8cd-xvrmp\" (UID: \"4e5afc7a-459a-4a76-bf92-fd47a823833e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.263430 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8j65\" (UniqueName: \"kubernetes.io/projected/26989151-2ab4-4ae1-9d53-f9c038fba7e1-kube-api-access-p8j65\") pod \"cinder-operator-controller-manager-859b6ccc6-dtmzt\" (UID: \"26989151-2ab4-4ae1-9d53-f9c038fba7e1\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.263461 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjj4x\" (UniqueName: \"kubernetes.io/projected/fce514d3-328b-4d3f-b863-8fbb70bac467-kube-api-access-fjj4x\") pod \"barbican-operator-controller-manager-7d9dfd778-txc7h\" (UID: \"fce514d3-328b-4d3f-b863-8fbb70bac467\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.273861 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-dqnwr" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.274676 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.293353 5014 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.295026 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.297534 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.299020 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.300764 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-dvjv4" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.304727 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-gw8kj" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.304979 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.305818 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.306546 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.322673 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.324931 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8j65\" (UniqueName: \"kubernetes.io/projected/26989151-2ab4-4ae1-9d53-f9c038fba7e1-kube-api-access-p8j65\") pod \"cinder-operator-controller-manager-859b6ccc6-dtmzt\" (UID: \"26989151-2ab4-4ae1-9d53-f9c038fba7e1\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.325178 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-txkbq" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.326169 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.336719 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.344396 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjj4x\" (UniqueName: \"kubernetes.io/projected/fce514d3-328b-4d3f-b863-8fbb70bac467-kube-api-access-fjj4x\") pod \"barbican-operator-controller-manager-7d9dfd778-txc7h\" (UID: \"fce514d3-328b-4d3f-b863-8fbb70bac467\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.344601 5014 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.345649 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.351814 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-mrxqc" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.362336 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.363567 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.364920 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4t64\" (UniqueName: \"kubernetes.io/projected/17fd6d59-b4b9-4dea-b697-3998c5d10976-kube-api-access-g4t64\") pod \"ironic-operator-controller-manager-6c548fd776-n7nfr\" (UID: \"17fd6d59-b4b9-4dea-b697-3998c5d10976\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.365179 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwwzf\" (UniqueName: \"kubernetes.io/projected/02a9a463-6c8c-4771-b583-6ea38f60b446-kube-api-access-dwwzf\") pod \"keystone-operator-controller-manager-7765d96ddf-b9d89\" (UID: \"02a9a463-6c8c-4771-b583-6ea38f60b446\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.365326 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.365407 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcrx8\" (UniqueName: \"kubernetes.io/projected/2720df25-8eec-42e6-8c03-8b9d18314712-kube-api-access-lcrx8\") pod \"heat-operator-controller-manager-5f64f6f8bb-422qp\" (UID: \"2720df25-8eec-42e6-8c03-8b9d18314712\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.365525 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxd7r\" (UniqueName: \"kubernetes.io/projected/344c7e6d-3b0d-4874-b9f1-40b7ae307199-kube-api-access-xxd7r\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.365562 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd67m\" (UniqueName: \"kubernetes.io/projected/90daaa58-8638-46b7-9492-27f70cc124a8-kube-api-access-gd67m\") pod 
\"designate-operator-controller-manager-78b4bc895b-pdzgg\" (UID: \"90daaa58-8638-46b7-9492-27f70cc124a8\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.365586 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hthnm\" (UniqueName: \"kubernetes.io/projected/aaf5b26b-5eaf-4143-b78f-69f8c976c10a-kube-api-access-hthnm\") pod \"horizon-operator-controller-manager-68c6d99b8f-pdg4w\" (UID: \"aaf5b26b-5eaf-4143-b78f-69f8c976c10a\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.365609 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zszwh\" (UniqueName: \"kubernetes.io/projected/4e5afc7a-459a-4a76-bf92-fd47a823833e-kube-api-access-zszwh\") pod \"glance-operator-controller-manager-77987cd8cd-xvrmp\" (UID: \"4e5afc7a-459a-4a76-bf92-fd47a823833e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.369656 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-wkfds" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.375339 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.383185 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.422623 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd67m\" (UniqueName: \"kubernetes.io/projected/90daaa58-8638-46b7-9492-27f70cc124a8-kube-api-access-gd67m\") pod \"designate-operator-controller-manager-78b4bc895b-pdzgg\" (UID: \"90daaa58-8638-46b7-9492-27f70cc124a8\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.426975 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.428760 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.441396 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.443015 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.449173 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-xg8sr" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.450067 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-f78wq" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.455468 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zszwh\" (UniqueName: \"kubernetes.io/projected/4e5afc7a-459a-4a76-bf92-fd47a823833e-kube-api-access-zszwh\") pod \"glance-operator-controller-manager-77987cd8cd-xvrmp\" (UID: \"4e5afc7a-459a-4a76-bf92-fd47a823833e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.466976 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-827cz\" (UniqueName: \"kubernetes.io/projected/2779b764-e7b5-448c-b189-9e450b7123cb-kube-api-access-827cz\") pod \"mariadb-operator-controller-manager-56bbcc9d85-h9j4b\" (UID: \"2779b764-e7b5-448c-b189-9e450b7123cb\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.467043 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsf78\" (UniqueName: \"kubernetes.io/projected/18a639b9-d602-4c6d-8c71-28611cbd65bf-kube-api-access-jsf78\") pod \"manila-operator-controller-manager-7c79b5df47-tvfjq\" (UID: \"18a639b9-d602-4c6d-8c71-28611cbd65bf\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.467082 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4t64\" (UniqueName: \"kubernetes.io/projected/17fd6d59-b4b9-4dea-b697-3998c5d10976-kube-api-access-g4t64\") pod \"ironic-operator-controller-manager-6c548fd776-n7nfr\" (UID: \"17fd6d59-b4b9-4dea-b697-3998c5d10976\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.467122 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwwzf\" (UniqueName: \"kubernetes.io/projected/02a9a463-6c8c-4771-b583-6ea38f60b446-kube-api-access-dwwzf\") pod \"keystone-operator-controller-manager-7765d96ddf-b9d89\" (UID: \"02a9a463-6c8c-4771-b583-6ea38f60b446\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.467145 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.467170 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcrx8\" (UniqueName: \"kubernetes.io/projected/2720df25-8eec-42e6-8c03-8b9d18314712-kube-api-access-lcrx8\") pod 
\"heat-operator-controller-manager-5f64f6f8bb-422qp\" (UID: \"2720df25-8eec-42e6-8c03-8b9d18314712\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.467201 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxd7r\" (UniqueName: \"kubernetes.io/projected/344c7e6d-3b0d-4874-b9f1-40b7ae307199-kube-api-access-xxd7r\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.467229 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65nv2\" (UniqueName: \"kubernetes.io/projected/7a3504d5-c870-42a1-8cb4-cceed657effe-kube-api-access-65nv2\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-kctmp\" (UID: \"7a3504d5-c870-42a1-8cb4-cceed657effe\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.467255 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hthnm\" (UniqueName: \"kubernetes.io/projected/aaf5b26b-5eaf-4143-b78f-69f8c976c10a-kube-api-access-hthnm\") pod \"horizon-operator-controller-manager-68c6d99b8f-pdg4w\" (UID: \"aaf5b26b-5eaf-4143-b78f-69f8c976c10a\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.467434 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" Dec 05 11:04:34 crc kubenswrapper[5014]: E1205 11:04:34.467708 5014 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:34 crc kubenswrapper[5014]: E1205 11:04:34.467789 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert podName:344c7e6d-3b0d-4874-b9f1-40b7ae307199 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:34.967746158 +0000 UTC m=+1001.915863862 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert") pod "infra-operator-controller-manager-57548d458d-lsnnj" (UID: "344c7e6d-3b0d-4874-b9f1-40b7ae307199") : secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.476510 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.488985 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.491690 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.499712 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.514335 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.516323 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.535591 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-k59l7" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.537567 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.541374 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr"] Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.542536 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.552663 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-h5f99" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.576713 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxd7r\" (UniqueName: \"kubernetes.io/projected/344c7e6d-3b0d-4874-b9f1-40b7ae307199-kube-api-access-xxd7r\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.578125 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65nv2\" (UniqueName: \"kubernetes.io/projected/7a3504d5-c870-42a1-8cb4-cceed657effe-kube-api-access-65nv2\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-kctmp\" (UID: \"7a3504d5-c870-42a1-8cb4-cceed657effe\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.578216 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvcpp\" (UniqueName: \"kubernetes.io/projected/355e95da-4f3d-4dce-b35e-79162bedce09-kube-api-access-wvcpp\") pod \"nova-operator-controller-manager-697bc559fc-t8lzs\" (UID: \"355e95da-4f3d-4dce-b35e-79162bedce09\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.578251 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-827cz\" (UniqueName: \"kubernetes.io/projected/2779b764-e7b5-448c-b189-9e450b7123cb-kube-api-access-827cz\") pod \"mariadb-operator-controller-manager-56bbcc9d85-h9j4b\" (UID: \"2779b764-e7b5-448c-b189-9e450b7123cb\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.578292 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-mkjdc\" (UniqueName: \"kubernetes.io/projected/5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37-kube-api-access-mkjdc\") pod \"octavia-operator-controller-manager-998648c74-v2hdr\" (UID: \"5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.578315 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsf78\" (UniqueName: \"kubernetes.io/projected/18a639b9-d602-4c6d-8c71-28611cbd65bf-kube-api-access-jsf78\") pod \"manila-operator-controller-manager-7c79b5df47-tvfjq\" (UID: \"18a639b9-d602-4c6d-8c71-28611cbd65bf\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.580094 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hthnm\" (UniqueName: \"kubernetes.io/projected/aaf5b26b-5eaf-4143-b78f-69f8c976c10a-kube-api-access-hthnm\") pod \"horizon-operator-controller-manager-68c6d99b8f-pdg4w\" (UID: \"aaf5b26b-5eaf-4143-b78f-69f8c976c10a\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.580485 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwwzf\" (UniqueName: \"kubernetes.io/projected/02a9a463-6c8c-4771-b583-6ea38f60b446-kube-api-access-dwwzf\") pod \"keystone-operator-controller-manager-7765d96ddf-b9d89\" (UID: \"02a9a463-6c8c-4771-b583-6ea38f60b446\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.585526 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4t64\" (UniqueName: \"kubernetes.io/projected/17fd6d59-b4b9-4dea-b697-3998c5d10976-kube-api-access-g4t64\") pod \"ironic-operator-controller-manager-6c548fd776-n7nfr\" (UID: \"17fd6d59-b4b9-4dea-b697-3998c5d10976\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.589996 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcrx8\" (UniqueName: \"kubernetes.io/projected/2720df25-8eec-42e6-8c03-8b9d18314712-kube-api-access-lcrx8\") pod \"heat-operator-controller-manager-5f64f6f8bb-422qp\" (UID: \"2720df25-8eec-42e6-8c03-8b9d18314712\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.612757 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65nv2\" (UniqueName: \"kubernetes.io/projected/7a3504d5-c870-42a1-8cb4-cceed657effe-kube-api-access-65nv2\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-kctmp\" (UID: \"7a3504d5-c870-42a1-8cb4-cceed657effe\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.625483 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsf78\" (UniqueName: \"kubernetes.io/projected/18a639b9-d602-4c6d-8c71-28611cbd65bf-kube-api-access-jsf78\") pod \"manila-operator-controller-manager-7c79b5df47-tvfjq\" (UID: \"18a639b9-d602-4c6d-8c71-28611cbd65bf\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.641783 
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.645650 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.657889 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-827cz\" (UniqueName: \"kubernetes.io/projected/2779b764-e7b5-448c-b189-9e450b7123cb-kube-api-access-827cz\") pod \"mariadb-operator-controller-manager-56bbcc9d85-h9j4b\" (UID: \"2779b764-e7b5-448c-b189-9e450b7123cb\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.681627 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr"]
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.682154 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvcpp\" (UniqueName: \"kubernetes.io/projected/355e95da-4f3d-4dce-b35e-79162bedce09-kube-api-access-wvcpp\") pod \"nova-operator-controller-manager-697bc559fc-t8lzs\" (UID: \"355e95da-4f3d-4dce-b35e-79162bedce09\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.682194 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkjdc\" (UniqueName: \"kubernetes.io/projected/5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37-kube-api-access-mkjdc\") pod \"octavia-operator-controller-manager-998648c74-v2hdr\" (UID: \"5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.697878 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.704384 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"]
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.708841 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.751438 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.763536 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx"]
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.766788 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.805562 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-55bg4"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.805983 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.808199 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4kbx\" (UniqueName: \"kubernetes.io/projected/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-kube-api-access-c4kbx\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.808250 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.808311 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svmp7\" (UniqueName: \"kubernetes.io/projected/e3bb4ae5-8495-40c1-9a07-affdc714ebe0-kube-api-access-svmp7\") pod \"ovn-operator-controller-manager-b6456fdb6-svxkx\" (UID: \"e3bb4ae5-8495-40c1-9a07-affdc714ebe0\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.814562 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.836490 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx"]
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.837793 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvcpp\" (UniqueName: \"kubernetes.io/projected/355e95da-4f3d-4dce-b35e-79162bedce09-kube-api-access-wvcpp\") pod \"nova-operator-controller-manager-697bc559fc-t8lzs\" (UID: \"355e95da-4f3d-4dce-b35e-79162bedce09\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.855751 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-mz8xb"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.856236 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.878197 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc"]
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.886263 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.897114 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.901537 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkjdc\" (UniqueName: \"kubernetes.io/projected/5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37-kube-api-access-mkjdc\") pod \"octavia-operator-controller-manager-998648c74-v2hdr\" (UID: \"5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.925128 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4kbx\" (UniqueName: \"kubernetes.io/projected/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-kube-api-access-c4kbx\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.925176 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.925209 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svmp7\" (UniqueName: \"kubernetes.io/projected/e3bb4ae5-8495-40c1-9a07-affdc714ebe0-kube-api-access-svmp7\") pod \"ovn-operator-controller-manager-b6456fdb6-svxkx\" (UID: \"e3bb4ae5-8495-40c1-9a07-affdc714ebe0\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx"
Dec 05 11:04:34 crc kubenswrapper[5014]: E1205 11:04:34.925581 5014 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 11:04:34 crc kubenswrapper[5014]: E1205 11:04:34.925625 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert podName:2fba4b2b-28c2-41b6-86a8-7bb26b432f71 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:35.425611675 +0000 UTC m=+1002.373729369 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" (UID: "2fba4b2b-28c2-41b6-86a8-7bb26b432f71") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.940090 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.954626 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.957823 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"]
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.957865 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc"]
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.957880 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg"]
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.959010 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.959093 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg"
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.961442 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk"]
Dec 05 11:04:34 crc kubenswrapper[5014]: I1205 11:04:34.962882 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.005743 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-wcr94"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.006502 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-cbctt"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.006727 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-74m7t"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.018561 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg"]
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.030635 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stds5\" (UniqueName: \"kubernetes.io/projected/831cc4a4-0997-4669-8c6d-9dbd8eaea14e-kube-api-access-stds5\") pod \"swift-operator-controller-manager-5f8c65bbfc-l9gtg\" (UID: \"831cc4a4-0997-4669-8c6d-9dbd8eaea14e\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.031219 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj"
Dec 05 11:04:35 crc kubenswrapper[5014]: E1205 11:04:35.031372 5014 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 11:04:35 crc kubenswrapper[5014]: E1205 11:04:35.031524 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert podName:344c7e6d-3b0d-4874-b9f1-40b7ae307199 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:36.031507824 +0000 UTC m=+1002.979625518 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert") pod "infra-operator-controller-manager-57548d458d-lsnnj" (UID: "344c7e6d-3b0d-4874-b9f1-40b7ae307199") : secret "infra-operator-webhook-server-cert" not found
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.034909 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk"]
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.037588 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4kbx\" (UniqueName: \"kubernetes.io/projected/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-kube-api-access-c4kbx\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.048495 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-fn75n"]
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.049786 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.072344 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-njw7p"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.086951 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svmp7\" (UniqueName: \"kubernetes.io/projected/e3bb4ae5-8495-40c1-9a07-affdc714ebe0-kube-api-access-svmp7\") pod \"ovn-operator-controller-manager-b6456fdb6-svxkx\" (UID: \"e3bb4ae5-8495-40c1-9a07-affdc714ebe0\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.165409 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.166318 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hk9r\" (UniqueName: \"kubernetes.io/projected/eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47-kube-api-access-9hk9r\") pod \"placement-operator-controller-manager-78f8948974-dbxkc\" (UID: \"eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.166361 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stds5\" (UniqueName: \"kubernetes.io/projected/831cc4a4-0997-4669-8c6d-9dbd8eaea14e-kube-api-access-stds5\") pod \"swift-operator-controller-manager-5f8c65bbfc-l9gtg\" (UID: \"831cc4a4-0997-4669-8c6d-9dbd8eaea14e\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.166526 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svv6g\" (UniqueName: \"kubernetes.io/projected/444b1e62-4d81-4e12-8110-9b5f680b3336-kube-api-access-svv6g\") pod \"telemetry-operator-controller-manager-76cc84c6bb-48xdk\" (UID: \"444b1e62-4d81-4e12-8110-9b5f680b3336\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.166608 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n49d\" (UniqueName: \"kubernetes.io/projected/6cb20401-6c79-43c4-a649-c1df07de148a-kube-api-access-5n49d\") pod \"test-operator-controller-manager-5854674fcc-fn75n\" (UID: \"6cb20401-6c79-43c4-a649-c1df07de148a\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.238630 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stds5\" (UniqueName: \"kubernetes.io/projected/831cc4a4-0997-4669-8c6d-9dbd8eaea14e-kube-api-access-stds5\") pod \"swift-operator-controller-manager-5f8c65bbfc-l9gtg\" (UID: \"831cc4a4-0997-4669-8c6d-9dbd8eaea14e\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.271709 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hk9r\" (UniqueName: \"kubernetes.io/projected/eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47-kube-api-access-9hk9r\") pod \"placement-operator-controller-manager-78f8948974-dbxkc\" (UID: \"eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.271846 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svv6g\" (UniqueName: \"kubernetes.io/projected/444b1e62-4d81-4e12-8110-9b5f680b3336-kube-api-access-svv6g\") pod \"telemetry-operator-controller-manager-76cc84c6bb-48xdk\" (UID: \"444b1e62-4d81-4e12-8110-9b5f680b3336\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.271897 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n49d\" (UniqueName: \"kubernetes.io/projected/6cb20401-6c79-43c4-a649-c1df07de148a-kube-api-access-5n49d\") pod \"test-operator-controller-manager-5854674fcc-fn75n\" (UID: \"6cb20401-6c79-43c4-a649-c1df07de148a\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.288651 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb"]
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.298469 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.298500 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hk9r\" (UniqueName: \"kubernetes.io/projected/eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47-kube-api-access-9hk9r\") pod \"placement-operator-controller-manager-78f8948974-dbxkc\" (UID: \"eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.308695 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-ql2cs"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.314988 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svv6g\" (UniqueName: \"kubernetes.io/projected/444b1e62-4d81-4e12-8110-9b5f680b3336-kube-api-access-svv6g\") pod \"telemetry-operator-controller-manager-76cc84c6bb-48xdk\" (UID: \"444b1e62-4d81-4e12-8110-9b5f680b3336\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.315684 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n49d\" (UniqueName: \"kubernetes.io/projected/6cb20401-6c79-43c4-a649-c1df07de148a-kube-api-access-5n49d\") pod \"test-operator-controller-manager-5854674fcc-fn75n\" (UID: \"6cb20401-6c79-43c4-a649-c1df07de148a\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.317223 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-fn75n"]
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.333322 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.376033 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxssk\" (UniqueName: \"kubernetes.io/projected/3948de6a-fa93-4223-bda4-73afc54cc63c-kube-api-access-kxssk\") pod \"watcher-operator-controller-manager-769dc69bc-brstb\" (UID: \"3948de6a-fa93-4223-bda4-73afc54cc63c\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.378959 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.386600 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb"]
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.435860 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.472328 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c"]
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.474464 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.481110 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-mzfj9"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.481337 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.481450 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.500736 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c"]
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.501300 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.522061 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxssk\" (UniqueName: \"kubernetes.io/projected/3948de6a-fa93-4223-bda4-73afc54cc63c-kube-api-access-kxssk\") pod \"watcher-operator-controller-manager-769dc69bc-brstb\" (UID: \"3948de6a-fa93-4223-bda4-73afc54cc63c\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb"
Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.522116 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"
Dec 05 11:04:35 crc kubenswrapper[5014]: E1205 11:04:35.522257 5014 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 11:04:35 crc kubenswrapper[5014]: E1205 11:04:35.522320 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert podName:2fba4b2b-28c2-41b6-86a8-7bb26b432f71 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:36.522305513 +0000 UTC m=+1003.470423217 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" (UID: "2fba4b2b-28c2-41b6-86a8-7bb26b432f71") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" (UID: "2fba4b2b-28c2-41b6-86a8-7bb26b432f71") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.562676 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxssk\" (UniqueName: \"kubernetes.io/projected/3948de6a-fa93-4223-bda4-73afc54cc63c-kube-api-access-kxssk\") pod \"watcher-operator-controller-manager-769dc69bc-brstb\" (UID: \"3948de6a-fa93-4223-bda4-73afc54cc63c\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.569127 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs"] Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.572103 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.574976 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-r2p9v" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.609693 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs"] Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.639165 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z7rb\" (UniqueName: \"kubernetes.io/projected/ddc5d07f-9748-41de-82c4-cf52f02063ac-kube-api-access-7z7rb\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.639231 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.639298 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkmb9\" (UniqueName: \"kubernetes.io/projected/fce76d80-94e7-4c38-93c0-044691915f03-kube-api-access-kkmb9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hppvs\" (UID: \"fce76d80-94e7-4c38-93c0-044691915f03\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.639325 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.642324 5014 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.741339 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.741416 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkmb9\" (UniqueName: \"kubernetes.io/projected/fce76d80-94e7-4c38-93c0-044691915f03-kube-api-access-kkmb9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hppvs\" (UID: \"fce76d80-94e7-4c38-93c0-044691915f03\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.741441 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.741488 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z7rb\" (UniqueName: \"kubernetes.io/projected/ddc5d07f-9748-41de-82c4-cf52f02063ac-kube-api-access-7z7rb\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:35 crc kubenswrapper[5014]: E1205 11:04:35.741841 5014 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 11:04:35 crc kubenswrapper[5014]: E1205 11:04:35.741883 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. No retries permitted until 2025-12-05 11:04:36.241868869 +0000 UTC m=+1003.189986573 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "metrics-server-cert" not found Dec 05 11:04:35 crc kubenswrapper[5014]: E1205 11:04:35.742057 5014 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 11:04:35 crc kubenswrapper[5014]: E1205 11:04:35.742146 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. No retries permitted until 2025-12-05 11:04:36.242127705 +0000 UTC m=+1003.190245449 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "webhook-server-cert" not found Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.781603 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z7rb\" (UniqueName: \"kubernetes.io/projected/ddc5d07f-9748-41de-82c4-cf52f02063ac-kube-api-access-7z7rb\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.794028 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkmb9\" (UniqueName: \"kubernetes.io/projected/fce76d80-94e7-4c38-93c0-044691915f03-kube-api-access-kkmb9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hppvs\" (UID: \"fce76d80-94e7-4c38-93c0-044691915f03\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.824819 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg"] Dec 05 11:04:35 crc kubenswrapper[5014]: I1205 11:04:35.975522 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" event={"ID":"90daaa58-8638-46b7-9492-27f70cc124a8","Type":"ContainerStarted","Data":"0680688ff19f4c3c31dface0fd8481fa3cd3497a186a8a83c69e539b0f55db7a"} Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.046356 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.046776 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.046502 5014 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.047387 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert podName:344c7e6d-3b0d-4874-b9f1-40b7ae307199 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:38.047364306 +0000 UTC m=+1004.995482010 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert") pod "infra-operator-controller-manager-57548d458d-lsnnj" (UID: "344c7e6d-3b0d-4874-b9f1-40b7ae307199") : secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.250683 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.250803 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.250975 5014 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.251033 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. No retries permitted until 2025-12-05 11:04:37.251014345 +0000 UTC m=+1004.199132049 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "webhook-server-cert" not found Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.251402 5014 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.251544 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. No retries permitted until 2025-12-05 11:04:37.251510707 +0000 UTC m=+1004.199628411 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "metrics-server-cert" not found Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.279956 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp"] Dec 05 11:04:36 crc kubenswrapper[5014]: W1205 11:04:36.287634 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e5afc7a_459a_4a76_bf92_fd47a823833e.slice/crio-d59d89926c287f217e21bd42a9513ac4acb401fcbe87d531a56dc6bbbc8c4a5c WatchSource:0}: Error finding container d59d89926c287f217e21bd42a9513ac4acb401fcbe87d531a56dc6bbbc8c4a5c: Status 404 returned error can't find the container with id d59d89926c287f217e21bd42a9513ac4acb401fcbe87d531a56dc6bbbc8c4a5c Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.295575 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w"] Dec 05 11:04:36 crc kubenswrapper[5014]: W1205 11:04:36.301551 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26989151_2ab4_4ae1_9d53_f9c038fba7e1.slice/crio-f58e3d3b6a63535051c7471e10eb8ff8d5e889c8f1a05e76e7bb1ccb177e7811 WatchSource:0}: Error finding container f58e3d3b6a63535051c7471e10eb8ff8d5e889c8f1a05e76e7bb1ccb177e7811: Status 404 returned error can't find the container with id f58e3d3b6a63535051c7471e10eb8ff8d5e889c8f1a05e76e7bb1ccb177e7811 Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.302455 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.325809 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h"] Dec 05 11:04:36 crc kubenswrapper[5014]: W1205 11:04:36.344649 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfce514d3_328b_4d3f_b863_8fbb70bac467.slice/crio-0a15d0b7025051d79d917618d4d92ecc660225ec7e88ddf153d78352c522dc5a WatchSource:0}: Error finding container 0a15d0b7025051d79d917618d4d92ecc660225ec7e88ddf153d78352c522dc5a: Status 404 returned error can't find the container with id 0a15d0b7025051d79d917618d4d92ecc660225ec7e88ddf153d78352c522dc5a Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.460562 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.482734 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.495457 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.501212 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-fn75n"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.510317 
5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.518595 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.525054 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.554357 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.554536 5014 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.554588 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert podName:2fba4b2b-28c2-41b6-86a8-7bb26b432f71 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:38.554572136 +0000 UTC m=+1005.502689840 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" (UID: "2fba4b2b-28c2-41b6-86a8-7bb26b432f71") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.727028 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.738133 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.746349 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc"] Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.768494 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb"] Dec 05 11:04:36 crc kubenswrapper[5014]: W1205 11:04:36.775555 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3948de6a_fa93_4223_bda4_73afc54cc63c.slice/crio-95e42a75d1f7938f647c489c501331b28cac6eb1c58ea142dd07ace3490d1a06 WatchSource:0}: Error finding container 95e42a75d1f7938f647c489c501331b28cac6eb1c58ea142dd07ace3490d1a06: Status 404 returned error can't find the container with id 95e42a75d1f7938f647c489c501331b28cac6eb1c58ea142dd07ace3490d1a06 Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.775768 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs"] Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.777285 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
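Two kinds of noise are worth separating when triaging a startup burst like this one. The "Failed to process watch event ... Status 404" warnings appear to be a race between cgroup creation and the container stats watcher and typically resolve on their own once the container exists, whereas the repeated secret "..." not found errors persist until something actually creates the secrets. A throwaway stdlib-only scanner of the kind one might run over this file to see which secrets are still missing and how often they recur (illustrative tooling, not part of kubelet):

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// missingSecret matches the kubelet error text seen throughout this log.
var missingSecret = regexp.MustCompile(`secret "([^"]+)" not found`)

func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // journal lines can be very long
	for sc.Scan() {
		for _, m := range missingSecret.FindAllStringSubmatch(sc.Text(), -1) {
			counts[m[1]]++
		}
	}
	for name, n := range counts {
		fmt.Printf("%6d  %s\n", n, name)
	}
}

Run as, say, go run scan.go < kubelet.log; a secret whose count keeps growing across the log is one the cert machinery never created, while a count that stops growing was just a startup ordering race.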
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-stds5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-l9gtg_openstack-operators(831cc4a4-0997-4669-8c6d-9dbd8eaea14e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.779126 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kkmb9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-hppvs_openstack-operators(fce76d80-94e7-4c38-93c0-044691915f03): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.779306 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-stds5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-l9gtg_openstack-operators(831cc4a4-0997-4669-8c6d-9dbd8eaea14e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.780228 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" podUID="fce76d80-94e7-4c38-93c0-044691915f03" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.780662 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg" podUID="831cc4a4-0997-4669-8c6d-9dbd8eaea14e" Dec 
05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.782102 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kxssk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-brstb_openstack-operators(3948de6a-fa93-4223-bda4-73afc54cc63c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.783680 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr"] Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.785025 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kxssk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-brstb_openstack-operators(3948de6a-fa93-4223-bda4-73afc54cc63c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.786688 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" podUID="3948de6a-fa93-4223-bda4-73afc54cc63c" Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.790256 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b"] Dec 05 11:04:36 crc kubenswrapper[5014]: W1205 11:04:36.795169 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5eb6b6bd_ee7f_4171_b8bf_c6fa71c35f37.slice/crio-0352a0547aec04ad71b03c046d2ce43b4655ddff775e95786e7579876805046e WatchSource:0}: Error finding container 0352a0547aec04ad71b03c046d2ce43b4655ddff775e95786e7579876805046e: Status 404 returned error can't find the container with id 0352a0547aec04ad71b03c046d2ce43b4655ddff775e95786e7579876805046e Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.798891 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk"] Dec 05 11:04:36 crc kubenswrapper[5014]: W1205 11:04:36.801432 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeabeab6b_00e0_4f5c_a2b9_5c6b6e99ab47.slice/crio-466e95e7046849c8a2b8a9641c3487827bc7005052b55cdfad29056c8d586af9 WatchSource:0}: Error finding container 466e95e7046849c8a2b8a9641c3487827bc7005052b55cdfad29056c8d586af9: Status 404 returned error can't find the container with id 466e95e7046849c8a2b8a9641c3487827bc7005052b55cdfad29056c8d586af9 Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.807641 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-827cz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-h9j4b_openstack-operators(2779b764-e7b5-448c-b189-9e450b7123cb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.809215 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mkjdc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-v2hdr_openstack-operators(5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.809347 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-svv6g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-48xdk_openstack-operators(444b1e62-4d81-4e12-8110-9b5f680b3336): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.819753 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-827cz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-h9j4b_openstack-operators(2779b764-e7b5-448c-b189-9e450b7123cb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.821748 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" podUID="2779b764-e7b5-448c-b189-9e450b7123cb" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.825030 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-svv6g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-48xdk_openstack-operators(444b1e62-4d81-4e12-8110-9b5f680b3336): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.825165 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mkjdc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-v2hdr_openstack-operators(5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.826391 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" podUID="5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37" Dec 05 
11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.826500 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk" podUID="444b1e62-4d81-4e12-8110-9b5f680b3336" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.831134 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9hk9r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-dbxkc_openstack-operators(eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.833997 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} 
{} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9hk9r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-dbxkc_openstack-operators(eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 11:04:36 crc kubenswrapper[5014]: E1205 11:04:36.836106 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc" podUID="eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47" Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.989332 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" event={"ID":"2720df25-8eec-42e6-8c03-8b9d18314712","Type":"ContainerStarted","Data":"17d054a1f390ff855057d8f63d18df7fadd3bef492615624e5a24301b16edf5c"} Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.993317 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" event={"ID":"fce514d3-328b-4d3f-b863-8fbb70bac467","Type":"ContainerStarted","Data":"0a15d0b7025051d79d917618d4d92ecc660225ec7e88ddf153d78352c522dc5a"} Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.995960 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" event={"ID":"26989151-2ab4-4ae1-9d53-f9c038fba7e1","Type":"ContainerStarted","Data":"f58e3d3b6a63535051c7471e10eb8ff8d5e889c8f1a05e76e7bb1ccb177e7811"} Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.997316 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs" event={"ID":"355e95da-4f3d-4dce-b35e-79162bedce09","Type":"ContainerStarted","Data":"a61418034510c2be23af27da7104e6ad6164a0b41c4d77e9d66a42ced0f77dae"} Dec 05 11:04:36 crc kubenswrapper[5014]: I1205 11:04:36.999123 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n" event={"ID":"6cb20401-6c79-43c4-a649-c1df07de148a","Type":"ContainerStarted","Data":"058f1461a9d4ae4d5bef385a6fb5f7483563066a6b511ff68d0dc31c32523006"} Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.000675 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg" 
event={"ID":"831cc4a4-0997-4669-8c6d-9dbd8eaea14e","Type":"ContainerStarted","Data":"be758680b771e6ae82b1abfb1ebe1158ef426ae9863ef1e091ad7b6d0b159664"} Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.002332 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx" event={"ID":"e3bb4ae5-8495-40c1-9a07-affdc714ebe0","Type":"ContainerStarted","Data":"c7e9c5f6b6ada5f7a462d04746b7c22c9d0e7348c5dad48b265d5ae4d858fcae"} Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.003200 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg" podUID="831cc4a4-0997-4669-8c6d-9dbd8eaea14e" Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.004593 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" event={"ID":"4e5afc7a-459a-4a76-bf92-fd47a823833e","Type":"ContainerStarted","Data":"d59d89926c287f217e21bd42a9513ac4acb401fcbe87d531a56dc6bbbc8c4a5c"} Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.007035 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" event={"ID":"18a639b9-d602-4c6d-8c71-28611cbd65bf","Type":"ContainerStarted","Data":"b1366678103eb9b3c4f520338ff5fdacc588fc28da31f6a0d5ca9ccb3118a611"} Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.010915 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" event={"ID":"5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37","Type":"ContainerStarted","Data":"0352a0547aec04ad71b03c046d2ce43b4655ddff775e95786e7579876805046e"} Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.019610 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" podUID="5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37" Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.032086 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" event={"ID":"02a9a463-6c8c-4771-b583-6ea38f60b446","Type":"ContainerStarted","Data":"52911bb02b1407af81c88a7beab1acd18309153f37f52825ce6432f78a347e05"} Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.037133 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" event={"ID":"aaf5b26b-5eaf-4143-b78f-69f8c976c10a","Type":"ContainerStarted","Data":"20b564d526304754fd624dde4a2b61607d54bc74b6d3d87afc13b485c36de000"} Dec 05 11:04:37 crc 
kubenswrapper[5014]: I1205 11:04:37.046772 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" event={"ID":"7a3504d5-c870-42a1-8cb4-cceed657effe","Type":"ContainerStarted","Data":"fe2b56f7aa976e532b4d8b9e98519743287e9610121ed2f25fd96c78429c8498"} Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.049754 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" event={"ID":"17fd6d59-b4b9-4dea-b697-3998c5d10976","Type":"ContainerStarted","Data":"b70fe27ffa05864ff2aaeaaf8d0b83e22bc70ff94e5e6c020c21d7fb76e268fc"} Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.058869 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk" event={"ID":"444b1e62-4d81-4e12-8110-9b5f680b3336","Type":"ContainerStarted","Data":"41b5b25d64d177d0558f6864946f021fac9232e3c6b3dd01640592dbb7c74625"} Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.062873 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc" event={"ID":"eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47","Type":"ContainerStarted","Data":"466e95e7046849c8a2b8a9641c3487827bc7005052b55cdfad29056c8d586af9"} Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.063218 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk" podUID="444b1e62-4d81-4e12-8110-9b5f680b3336" Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.066684 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" event={"ID":"3948de6a-fa93-4223-bda4-73afc54cc63c","Type":"ContainerStarted","Data":"95e42a75d1f7938f647c489c501331b28cac6eb1c58ea142dd07ace3490d1a06"} Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.066846 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc" podUID="eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47" Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.074108 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" event={"ID":"fce76d80-94e7-4c38-93c0-044691915f03","Type":"ContainerStarted","Data":"eb93d28357645d9943cb10e44647219f9b81b37de8fc6f5441668051e7c117e1"} Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.075829 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" podUID="3948de6a-fa93-4223-bda4-73afc54cc63c" Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.076766 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" podUID="fce76d80-94e7-4c38-93c0-044691915f03" Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.087369 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" event={"ID":"2779b764-e7b5-448c-b189-9e450b7123cb","Type":"ContainerStarted","Data":"881e975c50f6b59fbf3c2669db05fb0a59372f573d1cac9bd1abee0626027768"} Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.091705 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" podUID="2779b764-e7b5-448c-b189-9e450b7123cb" Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.270907 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:37 crc kubenswrapper[5014]: I1205 11:04:37.271005 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.271138 5014 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.271166 5014 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.271196 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. 
No retries permitted until 2025-12-05 11:04:39.271181222 +0000 UTC m=+1006.219298926 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "metrics-server-cert" not found Dec 05 11:04:37 crc kubenswrapper[5014]: E1205 11:04:37.271372 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. No retries permitted until 2025-12-05 11:04:39.271233853 +0000 UTC m=+1006.219351607 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "webhook-server-cert" not found Dec 05 11:04:38 crc kubenswrapper[5014]: I1205 11:04:38.088198 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.088457 5014 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.088579 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert podName:344c7e6d-3b0d-4874-b9f1-40b7ae307199 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:42.088551943 +0000 UTC m=+1009.036669697 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert") pod "infra-operator-controller-manager-57548d458d-lsnnj" (UID: "344c7e6d-3b0d-4874-b9f1-40b7ae307199") : secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.102799 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk" podUID="444b1e62-4d81-4e12-8110-9b5f680b3336" Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.102943 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" podUID="5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37" Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.103007 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" podUID="fce76d80-94e7-4c38-93c0-044691915f03" Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.103062 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg" podUID="831cc4a4-0997-4669-8c6d-9dbd8eaea14e" Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.103199 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc" podUID="eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47" Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.103260 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off 
pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" podUID="3948de6a-fa93-4223-bda4-73afc54cc63c" Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.103702 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" podUID="2779b764-e7b5-448c-b189-9e450b7123cb" Dec 05 11:04:38 crc kubenswrapper[5014]: I1205 11:04:38.603758 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.603963 5014 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:04:38 crc kubenswrapper[5014]: E1205 11:04:38.604251 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert podName:2fba4b2b-28c2-41b6-86a8-7bb26b432f71 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:42.604227537 +0000 UTC m=+1009.552345281 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" (UID: "2fba4b2b-28c2-41b6-86a8-7bb26b432f71") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:04:39 crc kubenswrapper[5014]: I1205 11:04:39.321778 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:39 crc kubenswrapper[5014]: I1205 11:04:39.321934 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:39 crc kubenswrapper[5014]: E1205 11:04:39.322121 5014 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 11:04:39 crc kubenswrapper[5014]: E1205 11:04:39.322187 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. No retries permitted until 2025-12-05 11:04:43.322168297 +0000 UTC m=+1010.270286001 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "webhook-server-cert" not found Dec 05 11:04:39 crc kubenswrapper[5014]: E1205 11:04:39.322323 5014 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 11:04:39 crc kubenswrapper[5014]: E1205 11:04:39.322404 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. No retries permitted until 2025-12-05 11:04:43.322383132 +0000 UTC m=+1010.270500926 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "metrics-server-cert" not found Dec 05 11:04:42 crc kubenswrapper[5014]: I1205 11:04:42.168286 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:42 crc kubenswrapper[5014]: E1205 11:04:42.168537 5014 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:42 crc kubenswrapper[5014]: E1205 11:04:42.168814 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert podName:344c7e6d-3b0d-4874-b9f1-40b7ae307199 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:50.168788113 +0000 UTC m=+1017.116905877 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert") pod "infra-operator-controller-manager-57548d458d-lsnnj" (UID: "344c7e6d-3b0d-4874-b9f1-40b7ae307199") : secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:42 crc kubenswrapper[5014]: I1205 11:04:42.674492 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" Dec 05 11:04:42 crc kubenswrapper[5014]: E1205 11:04:42.674695 5014 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:04:42 crc kubenswrapper[5014]: E1205 11:04:42.674764 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert podName:2fba4b2b-28c2-41b6-86a8-7bb26b432f71 nodeName:}" failed. No retries permitted until 2025-12-05 11:04:50.674746162 +0000 UTC m=+1017.622863866 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" (UID: "2fba4b2b-28c2-41b6-86a8-7bb26b432f71") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 11:04:43 crc kubenswrapper[5014]: I1205 11:04:43.389011 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:43 crc kubenswrapper[5014]: E1205 11:04:43.389203 5014 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 11:04:43 crc kubenswrapper[5014]: E1205 11:04:43.390314 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. No retries permitted until 2025-12-05 11:04:51.390297283 +0000 UTC m=+1018.338414987 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "metrics-server-cert" not found Dec 05 11:04:43 crc kubenswrapper[5014]: I1205 11:04:43.392446 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:43 crc kubenswrapper[5014]: E1205 11:04:43.392676 5014 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 11:04:43 crc kubenswrapper[5014]: E1205 11:04:43.392764 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs podName:ddc5d07f-9748-41de-82c4-cf52f02063ac nodeName:}" failed. No retries permitted until 2025-12-05 11:04:51.392742242 +0000 UTC m=+1018.340859996 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs") pod "openstack-operator-controller-manager-69b6fcdff-tzs9c" (UID: "ddc5d07f-9748-41de-82c4-cf52f02063ac") : secret "webhook-server-cert" not found Dec 05 11:04:49 crc kubenswrapper[5014]: E1205 11:04:49.545775 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 05 11:04:49 crc kubenswrapper[5014]: E1205 11:04:49.546504 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wvcpp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-t8lzs_openstack-operators(355e95da-4f3d-4dce-b35e-79162bedce09): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:04:50 crc kubenswrapper[5014]: E1205 11:04:50.091146 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 05 11:04:50 crc 
kubenswrapper[5014]: E1205 11:04:50.091571 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dwwzf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-b9d89_openstack-operators(02a9a463-6c8c-4771-b583-6ea38f60b446): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:04:50 crc kubenswrapper[5014]: I1205 11:04:50.196371 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:04:50 crc kubenswrapper[5014]: E1205 11:04:50.196525 5014 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:50 crc kubenswrapper[5014]: E1205 11:04:50.196585 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert podName:344c7e6d-3b0d-4874-b9f1-40b7ae307199 nodeName:}" failed. 
No retries permitted until 2025-12-05 11:05:06.196569473 +0000 UTC m=+1033.144687177 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert") pod "infra-operator-controller-manager-57548d458d-lsnnj" (UID: "344c7e6d-3b0d-4874-b9f1-40b7ae307199") : secret "infra-operator-webhook-server-cert" not found Dec 05 11:04:50 crc kubenswrapper[5014]: I1205 11:04:50.713477 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" Dec 05 11:04:50 crc kubenswrapper[5014]: I1205 11:04:50.722800 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2fba4b2b-28c2-41b6-86a8-7bb26b432f71-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd\" (UID: \"2fba4b2b-28c2-41b6-86a8-7bb26b432f71\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" Dec 05 11:04:50 crc kubenswrapper[5014]: I1205 11:04:50.954659 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.209247 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" event={"ID":"aaf5b26b-5eaf-4143-b78f-69f8c976c10a","Type":"ContainerStarted","Data":"f662937a5d8185eb9b27d8a2e27d46ecd2aa1505b890ee376f3200c5a8f059ed"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.222946 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx" event={"ID":"e3bb4ae5-8495-40c1-9a07-affdc714ebe0","Type":"ContainerStarted","Data":"4160ab7bed9d1654452f83eb1d56172cdbea410a976799d771ad70220959c11f"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.232038 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" event={"ID":"4e5afc7a-459a-4a76-bf92-fd47a823833e","Type":"ContainerStarted","Data":"227b398b5ad8ffe98d62295fdb22417af246321f3cb6bc33b52d98944b618641"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.234924 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" event={"ID":"2720df25-8eec-42e6-8c03-8b9d18314712","Type":"ContainerStarted","Data":"5074ea677297b6391028a5f9732d8318aad22a9997753db265333646d3a63c6b"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.241363 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" event={"ID":"26989151-2ab4-4ae1-9d53-f9c038fba7e1","Type":"ContainerStarted","Data":"05bb378d23cc6ad90989d6746e738166c5494e851100eaead27638cee66208a0"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.243626 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" 
event={"ID":"90daaa58-8638-46b7-9492-27f70cc124a8","Type":"ContainerStarted","Data":"d6e3e4c8c1e9db48efa0599faf5af0b0c42a868887683904613d5ff9a0d70ece"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.251550 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n" event={"ID":"6cb20401-6c79-43c4-a649-c1df07de148a","Type":"ContainerStarted","Data":"cc720686838d9ac5cbb0277da60688f01fd5adb718739e1eaf3e5d64616625d4"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.261596 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" event={"ID":"18a639b9-d602-4c6d-8c71-28611cbd65bf","Type":"ContainerStarted","Data":"6ee0983cf705117983bbbb9fcd12d204d90be4fb2dcae1db7c0876392d4ad3da"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.268811 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" event={"ID":"7a3504d5-c870-42a1-8cb4-cceed657effe","Type":"ContainerStarted","Data":"030691f9e8a9769026872a7656ddb127a8ce22fc0da9b7a8a012686e8ddc74e3"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.270228 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" event={"ID":"fce514d3-328b-4d3f-b863-8fbb70bac467","Type":"ContainerStarted","Data":"e62bc78246f7bb992dcadaf80acc376602dc43d88ab66a2efb3acf6daa423f18"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.282548 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" event={"ID":"17fd6d59-b4b9-4dea-b697-3998c5d10976","Type":"ContainerStarted","Data":"670ef8d7dcf5e36b2e55f82b4c76673d3f427819a1e1c4b14f7dad570255df7f"} Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.424075 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.424188 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.430011 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-metrics-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: \"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.430077 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ddc5d07f-9748-41de-82c4-cf52f02063ac-webhook-certs\") pod \"openstack-operator-controller-manager-69b6fcdff-tzs9c\" (UID: 
\"ddc5d07f-9748-41de-82c4-cf52f02063ac\") " pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:51 crc kubenswrapper[5014]: I1205 11:04:51.613209 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:04:52 crc kubenswrapper[5014]: I1205 11:04:52.647727 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd"] Dec 05 11:04:54 crc kubenswrapper[5014]: I1205 11:04:54.079465 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c"] Dec 05 11:04:54 crc kubenswrapper[5014]: I1205 11:04:54.362996 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" event={"ID":"2fba4b2b-28c2-41b6-86a8-7bb26b432f71","Type":"ContainerStarted","Data":"d6a4fb72b68e84fff533777cf09638b4b28388c511b09f8e21195bedadd1116b"} Dec 05 11:04:58 crc kubenswrapper[5014]: I1205 11:04:58.402970 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" event={"ID":"ddc5d07f-9748-41de-82c4-cf52f02063ac","Type":"ContainerStarted","Data":"6df145c4889658068c8f36b7dc580539b625801c3ee5bbe0205741526f075f32"} Dec 05 11:05:02 crc kubenswrapper[5014]: I1205 11:05:02.936634 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:05:02 crc kubenswrapper[5014]: I1205 11:05:02.936966 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:05:06 crc kubenswrapper[5014]: I1205 11:05:06.198352 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:05:06 crc kubenswrapper[5014]: I1205 11:05:06.207090 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/344c7e6d-3b0d-4874-b9f1-40b7ae307199-cert\") pod \"infra-operator-controller-manager-57548d458d-lsnnj\" (UID: \"344c7e6d-3b0d-4874-b9f1-40b7ae307199\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:05:06 crc kubenswrapper[5014]: I1205 11:05:06.483775 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-gw8kj" Dec 05 11:05:06 crc kubenswrapper[5014]: I1205 11:05:06.491400 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:05:16 crc kubenswrapper[5014]: E1205 11:05:16.527458 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 11:05:16 crc kubenswrapper[5014]: E1205 11:05:16.528074 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hthnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-pdg4w_openstack-operators(aaf5b26b-5eaf-4143-b78f-69f8c976c10a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:05:16 crc kubenswrapper[5014]: E1205 11:05:16.527661 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 11:05:16 crc kubenswrapper[5014]: E1205 11:05:16.528461 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jsf78,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-tvfjq_openstack-operators(18a639b9-d602-4c6d-8c71-28611cbd65bf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:05:16 crc kubenswrapper[5014]: E1205 11:05:16.529947 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" podUID="aaf5b26b-5eaf-4143-b78f-69f8c976c10a" Dec 05 11:05:16 crc kubenswrapper[5014]: E1205 11:05:16.529981 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" podUID="18a639b9-d602-4c6d-8c71-28611cbd65bf" Dec 05 11:05:16 crc kubenswrapper[5014]: I1205 11:05:16.729481 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" Dec 05 11:05:16 crc kubenswrapper[5014]: I1205 11:05:16.730080 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" Dec 05 11:05:16 crc kubenswrapper[5014]: I1205 11:05:16.731180 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" Dec 05 11:05:16 crc kubenswrapper[5014]: I1205 11:05:16.732859 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" Dec 05 11:05:17 crc kubenswrapper[5014]: E1205 11:05:17.161734 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 11:05:17 crc kubenswrapper[5014]: E1205 11:05:17.161941 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: 
{{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-svmp7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-svxkx_openstack-operators(e3bb4ae5-8495-40c1-9a07-affdc714ebe0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:05:17 crc kubenswrapper[5014]: E1205 11:05:17.163506 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx" podUID="e3bb4ae5-8495-40c1-9a07-affdc714ebe0" Dec 05 11:05:17 crc kubenswrapper[5014]: E1205 11:05:17.167873 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 11:05:17 crc kubenswrapper[5014]: E1205 11:05:17.168032 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dwwzf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-b9d89_openstack-operators(02a9a463-6c8c-4771-b583-6ea38f60b446): ErrImagePull: rpc error: code = Canceled 
desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:05:17 crc kubenswrapper[5014]: E1205 11:05:17.169233 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" podUID="02a9a463-6c8c-4771-b583-6ea38f60b446" Dec 05 11:05:17 crc kubenswrapper[5014]: I1205 11:05:17.736282 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx" Dec 05 11:05:17 crc kubenswrapper[5014]: I1205 11:05:17.738323 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx" Dec 05 11:05:17 crc kubenswrapper[5014]: E1205 11:05:17.781924 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 05 11:05:17 crc kubenswrapper[5014]: E1205 11:05:17.782111 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kkmb9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-hppvs_openstack-operators(fce76d80-94e7-4c38-93c0-044691915f03): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 05 11:05:17 crc kubenswrapper[5014]: E1205 11:05:17.783384 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" podUID="fce76d80-94e7-4c38-93c0-044691915f03" Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.004603 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj"] Dec 05 11:05:19 crc kubenswrapper[5014]: W1205 11:05:19.019988 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod344c7e6d_3b0d_4874_b9f1_40b7ae307199.slice/crio-76abae52f30e55c2676967e8bd76b1e4da2a1022d566d18ccae0bd40e34f5398 WatchSource:0}: Error finding container 76abae52f30e55c2676967e8bd76b1e4da2a1022d566d18ccae0bd40e34f5398: Status 404 returned error can't find the container with id 76abae52f30e55c2676967e8bd76b1e4da2a1022d566d18ccae0bd40e34f5398 Dec 05 11:05:19 crc kubenswrapper[5014]: E1205 11:05:19.767917 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs" podUID="355e95da-4f3d-4dce-b35e-79162bedce09" Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.768249 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" event={"ID":"3948de6a-fa93-4223-bda4-73afc54cc63c","Type":"ContainerStarted","Data":"1c57387651da1662379be9a9f53caae7c51e5551d323d2836ee2dfec68add5ec"} Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.787064 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" event={"ID":"344c7e6d-3b0d-4874-b9f1-40b7ae307199","Type":"ContainerStarted","Data":"76abae52f30e55c2676967e8bd76b1e4da2a1022d566d18ccae0bd40e34f5398"} Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.790496 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" event={"ID":"ddc5d07f-9748-41de-82c4-cf52f02063ac","Type":"ContainerStarted","Data":"aeab313b4ac9d8a4ed52efd7f15f7b49e10015cc760b45f8ada7916172662449"} Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.791347 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.813952 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg" event={"ID":"831cc4a4-0997-4669-8c6d-9dbd8eaea14e","Type":"ContainerStarted","Data":"258eec26b56d83eeb1255cb0e25544a73b512c408d056b3dbb66fafb4a53c620"} Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.824254 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" event={"ID":"5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37","Type":"ContainerStarted","Data":"445c39ade8f2d1ecee63afd17b4aa4d878535ba8bcd8d9150789b173c12e183a"} Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.840719 5014 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk" event={"ID":"444b1e62-4d81-4e12-8110-9b5f680b3336","Type":"ContainerStarted","Data":"6d90cae7dfa97ec84946893e64c9cf04a5468eaa3e1e003ab01d540f94eecc04"} Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.851441 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc" event={"ID":"eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47","Type":"ContainerStarted","Data":"2cdaab18cf602434d0308389b18d3aec59276838ff93cbe33a1f257988f88149"} Dec 05 11:05:19 crc kubenswrapper[5014]: I1205 11:05:19.870186 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c" podStartSLOduration=44.870164849 podStartE2EDuration="44.870164849s" podCreationTimestamp="2025-12-05 11:04:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:05:19.858834993 +0000 UTC m=+1046.806952697" watchObservedRunningTime="2025-12-05 11:05:19.870164849 +0000 UTC m=+1046.818282553" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.872481 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" event={"ID":"26989151-2ab4-4ae1-9d53-f9c038fba7e1","Type":"ContainerStarted","Data":"f5c1d20df6c7e9b7b4de8ea24bff47af6fb737b0b5d6546f11079a8f77811060"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.872896 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.883462 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" event={"ID":"2fba4b2b-28c2-41b6-86a8-7bb26b432f71","Type":"ContainerStarted","Data":"2f0e6ffbf9b5eb88c412487236be137064def70c392ee329573ae501bc68a070"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.884754 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" event={"ID":"2779b764-e7b5-448c-b189-9e450b7123cb","Type":"ContainerStarted","Data":"e8310f15248b7a522225845467d6f3c4c377ee1d103eb743b1ac44a74474076c"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.885487 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.891336 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" event={"ID":"90daaa58-8638-46b7-9492-27f70cc124a8","Type":"ContainerStarted","Data":"b09e9c4369310e558f11544e5a3ddc8535a683bf98eb0a6edaf701d867105510"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.891821 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.893887 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" 
event={"ID":"aaf5b26b-5eaf-4143-b78f-69f8c976c10a","Type":"ContainerStarted","Data":"dd0548e7d8dcb10185e4b566f837a1c8778ed62eb298190fc769728a038ed810"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.894704 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.903539 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" event={"ID":"3948de6a-fa93-4223-bda4-73afc54cc63c","Type":"ContainerStarted","Data":"929b8cdd0afdb199a8e25a58f85c6dea5ff086a25e1881440304bbe954aff09d"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.904398 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.905846 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs" event={"ID":"355e95da-4f3d-4dce-b35e-79162bedce09","Type":"ContainerStarted","Data":"964f22e7b2d58a06e0dc37c30cd6110d4fc6f236352384ab25d49455893f887c"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.917201 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dtmzt" podStartSLOduration=4.325613425 podStartE2EDuration="46.91718226s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.310118364 +0000 UTC m=+1003.258236068" lastFinishedPulling="2025-12-05 11:05:18.901687199 +0000 UTC m=+1045.849804903" observedRunningTime="2025-12-05 11:05:20.912658801 +0000 UTC m=+1047.860776515" watchObservedRunningTime="2025-12-05 11:05:20.91718226 +0000 UTC m=+1047.865299964" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.928811 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" event={"ID":"18a639b9-d602-4c6d-8c71-28611cbd65bf","Type":"ContainerStarted","Data":"2a7759a5498fa4346b0417c0c5347321ed06d4de9b695883553a7340280db86b"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.945238 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n" event={"ID":"6cb20401-6c79-43c4-a649-c1df07de148a","Type":"ContainerStarted","Data":"25908c2eb2d89ba1db6d5a86e51cd0aa7975fc893c1eff549af26ed98bca6ff4"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.946429 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.949685 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.953222 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-pdzgg" podStartSLOduration=4.057249351 podStartE2EDuration="46.953193697s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:35.889121284 +0000 UTC m=+1002.837238988" lastFinishedPulling="2025-12-05 11:05:18.78506563 +0000 UTC m=+1045.733183334" 
observedRunningTime="2025-12-05 11:05:20.947819776 +0000 UTC m=+1047.895937510" watchObservedRunningTime="2025-12-05 11:05:20.953193697 +0000 UTC m=+1047.901311411" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.957600 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc" event={"ID":"eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47","Type":"ContainerStarted","Data":"07f6b120685e1e19d22592076350f6e7120db604ea4a498ce257d67d20bc2f4c"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.958100 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc" Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.979262 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" event={"ID":"7a3504d5-c870-42a1-8cb4-cceed657effe","Type":"ContainerStarted","Data":"ff138317014e3628c01c8779253c7ae96b4e4114fb8e2a3cd0e8a4c47aec39c1"} Dec 05 11:05:20 crc kubenswrapper[5014]: I1205 11:05:20.982088 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.009622 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.027470 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" event={"ID":"2720df25-8eec-42e6-8c03-8b9d18314712","Type":"ContainerStarted","Data":"aaec54a961f396f536fd1f8c32365125b70228cd5aa823674f8880fd69e7b1c4"} Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.028835 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.037201 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.042455 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" event={"ID":"fce514d3-328b-4d3f-b863-8fbb70bac467","Type":"ContainerStarted","Data":"0ff6453eeef88ccfadcada38a4fd180cde0f8c9e3d6b01f627249ccf7143f980"} Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.042783 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.049947 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.062072 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" event={"ID":"02a9a463-6c8c-4771-b583-6ea38f60b446","Type":"ContainerStarted","Data":"dd0cfdaf0a39a2d23de817f1e325d2ca88e04b5ae50a32a3fe4d9f4088dba949"} Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.106308 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" event={"ID":"17fd6d59-b4b9-4dea-b697-3998c5d10976","Type":"ContainerStarted","Data":"155168cf4c84f4e024a7993c7a939244f34e1e94bd100fe46206fa037d56a8b9"} Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.108968 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.159874 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.161465 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-pdg4w" podStartSLOduration=33.405499055 podStartE2EDuration="47.161442037s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.304842325 +0000 UTC m=+1003.252960029" lastFinishedPulling="2025-12-05 11:04:50.060785307 +0000 UTC m=+1017.008903011" observedRunningTime="2025-12-05 11:05:21.072759208 +0000 UTC m=+1048.020876902" watchObservedRunningTime="2025-12-05 11:05:21.161442037 +0000 UTC m=+1048.109559751" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.162520 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" podStartSLOduration=5.722603862 podStartE2EDuration="46.162511093s" podCreationTimestamp="2025-12-05 11:04:35 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.781998642 +0000 UTC m=+1003.730116346" lastFinishedPulling="2025-12-05 11:05:17.221905873 +0000 UTC m=+1044.170023577" observedRunningTime="2025-12-05 11:05:21.160168637 +0000 UTC m=+1048.108286371" watchObservedRunningTime="2025-12-05 11:05:21.162511093 +0000 UTC m=+1048.110628807" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.167980 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx" event={"ID":"e3bb4ae5-8495-40c1-9a07-affdc714ebe0","Type":"ContainerStarted","Data":"61ee8b2ce7bc876da0731deccbe5df47d3f83e3b194afde203faa538eb72ebb5"} Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.183525 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" event={"ID":"4e5afc7a-459a-4a76-bf92-fd47a823833e","Type":"ContainerStarted","Data":"e81ef86e3d8aec0817681883d4955ad4c326176cca11c11ca40a0b1a9a469905"} Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.183582 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.191530 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.222757 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-fn75n" podStartSLOduration=3.964382816 podStartE2EDuration="46.222729759s" podCreationTimestamp="2025-12-05 11:04:35 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.514291975 +0000 UTC m=+1003.462409679" lastFinishedPulling="2025-12-05 
11:05:18.772638918 +0000 UTC m=+1045.720756622" observedRunningTime="2025-12-05 11:05:21.204302141 +0000 UTC m=+1048.152419855" watchObservedRunningTime="2025-12-05 11:05:21.222729759 +0000 UTC m=+1048.170847463" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.348998 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-tvfjq" podStartSLOduration=33.728099961 podStartE2EDuration="47.348975643s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.446962636 +0000 UTC m=+1003.395080340" lastFinishedPulling="2025-12-05 11:04:50.067838318 +0000 UTC m=+1017.015956022" observedRunningTime="2025-12-05 11:05:21.344582366 +0000 UTC m=+1048.292700070" watchObservedRunningTime="2025-12-05 11:05:21.348975643 +0000 UTC m=+1048.297093357" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.370890 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-422qp" podStartSLOduration=5.35599287 podStartE2EDuration="47.370875316s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.758295965 +0000 UTC m=+1003.706413669" lastFinishedPulling="2025-12-05 11:05:18.773178391 +0000 UTC m=+1045.721296115" observedRunningTime="2025-12-05 11:05:21.367557246 +0000 UTC m=+1048.315674950" watchObservedRunningTime="2025-12-05 11:05:21.370875316 +0000 UTC m=+1048.318993020" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.412662 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-n7nfr" podStartSLOduration=5.667106927 podStartE2EDuration="47.412646534s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.489355698 +0000 UTC m=+1003.437473402" lastFinishedPulling="2025-12-05 11:05:18.234895285 +0000 UTC m=+1045.183013009" observedRunningTime="2025-12-05 11:05:21.408630236 +0000 UTC m=+1048.356747950" watchObservedRunningTime="2025-12-05 11:05:21.412646534 +0000 UTC m=+1048.360764228" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.483605 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc" podStartSLOduration=7.092609422 podStartE2EDuration="47.483590511s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.830800441 +0000 UTC m=+1003.778918145" lastFinishedPulling="2025-12-05 11:05:17.22178152 +0000 UTC m=+1044.169899234" observedRunningTime="2025-12-05 11:05:21.483459277 +0000 UTC m=+1048.431577001" watchObservedRunningTime="2025-12-05 11:05:21.483590511 +0000 UTC m=+1048.431708205" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.484692 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-kctmp" podStartSLOduration=5.101639937 podStartE2EDuration="47.484686727s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.517779519 +0000 UTC m=+1003.465897223" lastFinishedPulling="2025-12-05 11:05:18.900826309 +0000 UTC m=+1045.848944013" observedRunningTime="2025-12-05 11:05:21.451750146 +0000 UTC m=+1048.399867850" watchObservedRunningTime="2025-12-05 11:05:21.484686727 +0000 UTC m=+1048.432804431" Dec 05 11:05:21 crc kubenswrapper[5014]: 
I1205 11:05:21.518896 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txc7h" podStartSLOduration=4.971855299 podStartE2EDuration="47.51886519s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.352086926 +0000 UTC m=+1003.300204630" lastFinishedPulling="2025-12-05 11:05:18.899096817 +0000 UTC m=+1045.847214521" observedRunningTime="2025-12-05 11:05:21.506163671 +0000 UTC m=+1048.454281395" watchObservedRunningTime="2025-12-05 11:05:21.51886519 +0000 UTC m=+1048.466982904" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.570297 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-svxkx" podStartSLOduration=34.001519477 podStartE2EDuration="47.57025206s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.493836387 +0000 UTC m=+1003.441954081" lastFinishedPulling="2025-12-05 11:04:50.06256896 +0000 UTC m=+1017.010686664" observedRunningTime="2025-12-05 11:05:21.569106303 +0000 UTC m=+1048.517224007" watchObservedRunningTime="2025-12-05 11:05:21.57025206 +0000 UTC m=+1048.518369784" Dec 05 11:05:21 crc kubenswrapper[5014]: I1205 11:05:21.618917 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-xvrmp" podStartSLOduration=4.987505328 podStartE2EDuration="47.618883814s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.288262101 +0000 UTC m=+1003.236379805" lastFinishedPulling="2025-12-05 11:05:18.919640587 +0000 UTC m=+1045.867758291" observedRunningTime="2025-12-05 11:05:21.609028284 +0000 UTC m=+1048.557145998" watchObservedRunningTime="2025-12-05 11:05:21.618883814 +0000 UTC m=+1048.567001518" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.193318 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk" event={"ID":"444b1e62-4d81-4e12-8110-9b5f680b3336","Type":"ContainerStarted","Data":"496dd62e3d0345f8f359f4059c338fc253239d59f3d8ef79709425b879ba3a67"} Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.193534 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.226096 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" event={"ID":"2fba4b2b-28c2-41b6-86a8-7bb26b432f71","Type":"ContainerStarted","Data":"ff311cdf78e2410b83f01c4ff82bab88237f4f66743f17529bcdd70b98df4d88"} Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.226340 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.231713 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" event={"ID":"2779b764-e7b5-448c-b189-9e450b7123cb","Type":"ContainerStarted","Data":"025cdc5538735f63881ee6c50c583ee222151b93ccc87cadafdf903fb753ed5e"} Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.231843 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.234439 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" event={"ID":"02a9a463-6c8c-4771-b583-6ea38f60b446","Type":"ContainerStarted","Data":"d070929cde95ed88414bce89c1a1541ff2eff1ad95cf14055597cf7cae0e5c1d"} Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.234591 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.236386 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg" event={"ID":"831cc4a4-0997-4669-8c6d-9dbd8eaea14e","Type":"ContainerStarted","Data":"4a6da079554adaef45326bd28f685e646f2ec3294e11bf33604e30b8db710879"} Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.236925 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.242932 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" event={"ID":"5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37","Type":"ContainerStarted","Data":"2cc6b26ee9f3632e311cc4a47d07cd3778dbe14e3190da94e452b9504c00cd63"} Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.246320 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.285174 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk" podStartSLOduration=7.872599092 podStartE2EDuration="48.285152106s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.808986959 +0000 UTC m=+1003.757104663" lastFinishedPulling="2025-12-05 11:05:17.221539973 +0000 UTC m=+1044.169657677" observedRunningTime="2025-12-05 11:05:22.272929878 +0000 UTC m=+1049.221047592" watchObservedRunningTime="2025-12-05 11:05:22.285152106 +0000 UTC m=+1049.233269810" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.307372 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" podStartSLOduration=7.35086138 podStartE2EDuration="48.307352787s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.806663303 +0000 UTC m=+1003.754781007" lastFinishedPulling="2025-12-05 11:05:17.76315471 +0000 UTC m=+1044.711272414" observedRunningTime="2025-12-05 11:05:22.298741687 +0000 UTC m=+1049.246859421" watchObservedRunningTime="2025-12-05 11:05:22.307352787 +0000 UTC m=+1049.255470491" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.332803 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" podStartSLOduration=27.152383473 podStartE2EDuration="48.332778945s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.806680884 +0000 UTC m=+1003.754798588" lastFinishedPulling="2025-12-05 11:04:57.987076346 
+0000 UTC m=+1024.935194060" observedRunningTime="2025-12-05 11:05:22.31571729 +0000 UTC m=+1049.263835004" watchObservedRunningTime="2025-12-05 11:05:22.332778945 +0000 UTC m=+1049.280896650" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.353406 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89" podStartSLOduration=5.933352957 podStartE2EDuration="48.353380367s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.500200402 +0000 UTC m=+1003.448318106" lastFinishedPulling="2025-12-05 11:05:18.920227812 +0000 UTC m=+1045.868345516" observedRunningTime="2025-12-05 11:05:22.339638953 +0000 UTC m=+1049.287756667" watchObservedRunningTime="2025-12-05 11:05:22.353380367 +0000 UTC m=+1049.301498081" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.362338 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg" podStartSLOduration=7.917510974 podStartE2EDuration="48.362311664s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.776901418 +0000 UTC m=+1003.725019122" lastFinishedPulling="2025-12-05 11:05:17.221702108 +0000 UTC m=+1044.169819812" observedRunningTime="2025-12-05 11:05:22.359215099 +0000 UTC m=+1049.307332823" watchObservedRunningTime="2025-12-05 11:05:22.362311664 +0000 UTC m=+1049.310429368" Dec 05 11:05:22 crc kubenswrapper[5014]: I1205 11:05:22.646780 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" podStartSLOduration=24.362456786 podStartE2EDuration="48.646756s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:53.668603486 +0000 UTC m=+1020.616721190" lastFinishedPulling="2025-12-05 11:05:17.9529027 +0000 UTC m=+1044.901020404" observedRunningTime="2025-12-05 11:05:22.505345817 +0000 UTC m=+1049.453463531" watchObservedRunningTime="2025-12-05 11:05:22.646756 +0000 UTC m=+1049.594873704" Dec 05 11:05:23 crc kubenswrapper[5014]: I1205 11:05:23.274329 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs" event={"ID":"355e95da-4f3d-4dce-b35e-79162bedce09","Type":"ContainerStarted","Data":"3ecc924a477dc20d9764ebdc1e5b92a00df397d5b8cc99f0c61373453235c7c6"} Dec 05 11:05:23 crc kubenswrapper[5014]: I1205 11:05:23.276459 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs" Dec 05 11:05:23 crc kubenswrapper[5014]: I1205 11:05:23.349028 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs" podStartSLOduration=3.8992807259999998 podStartE2EDuration="49.349009768s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.514701465 +0000 UTC m=+1003.462819169" lastFinishedPulling="2025-12-05 11:05:21.964430507 +0000 UTC m=+1048.912548211" observedRunningTime="2025-12-05 11:05:23.299331997 +0000 UTC m=+1050.247449711" watchObservedRunningTime="2025-12-05 11:05:23.349009768 +0000 UTC m=+1050.297127472" Dec 05 11:05:24 crc kubenswrapper[5014]: I1205 11:05:24.280087 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" event={"ID":"344c7e6d-3b0d-4874-b9f1-40b7ae307199","Type":"ContainerStarted","Data":"9c75dd23d45f896ed35c0c34f13e72f61d13178be32f97aeced73a68d5afd6fd"} Dec 05 11:05:24 crc kubenswrapper[5014]: I1205 11:05:24.280430 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" event={"ID":"344c7e6d-3b0d-4874-b9f1-40b7ae307199","Type":"ContainerStarted","Data":"a2cf1b247a51d4dfdfd71cc1cc089b7ee6e6074609826af77a45904a0c3f360a"} Dec 05 11:05:24 crc kubenswrapper[5014]: I1205 11:05:24.280854 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" Dec 05 11:05:24 crc kubenswrapper[5014]: I1205 11:05:24.283464 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-v2hdr" Dec 05 11:05:24 crc kubenswrapper[5014]: I1205 11:05:24.283553 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-l9gtg" Dec 05 11:05:24 crc kubenswrapper[5014]: I1205 11:05:24.309177 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj" podStartSLOduration=45.701612887 podStartE2EDuration="50.309157184s" podCreationTimestamp="2025-12-05 11:04:34 +0000 UTC" firstStartedPulling="2025-12-05 11:05:19.037581309 +0000 UTC m=+1045.985699013" lastFinishedPulling="2025-12-05 11:05:23.645125606 +0000 UTC m=+1050.593243310" observedRunningTime="2025-12-05 11:05:24.305333831 +0000 UTC m=+1051.253451535" watchObservedRunningTime="2025-12-05 11:05:24.309157184 +0000 UTC m=+1051.257274888" Dec 05 11:05:24 crc kubenswrapper[5014]: I1205 11:05:24.899899 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-h9j4b" Dec 05 11:05:25 crc kubenswrapper[5014]: I1205 11:05:25.382392 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-dbxkc" Dec 05 11:05:25 crc kubenswrapper[5014]: I1205 11:05:25.438866 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-48xdk" Dec 05 11:05:25 crc kubenswrapper[5014]: I1205 11:05:25.646260 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-brstb" Dec 05 11:05:28 crc kubenswrapper[5014]: E1205 11:05:28.320217 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" podUID="fce76d80-94e7-4c38-93c0-044691915f03" Dec 05 11:05:30 crc kubenswrapper[5014]: I1205 11:05:30.963333 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd" Dec 05 11:05:31 crc kubenswrapper[5014]: I1205 11:05:31.620588 5014 kubelet.go:2542] "SyncLoop (probe)" 
Dec 05 11:05:31 crc kubenswrapper[5014]: I1205 11:05:31.620588 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-69b6fcdff-tzs9c"
Dec 05 11:05:32 crc kubenswrapper[5014]: I1205 11:05:32.936772 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:05:32 crc kubenswrapper[5014]: I1205 11:05:32.936846 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:05:32 crc kubenswrapper[5014]: I1205 11:05:32.936902 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5"
Dec 05 11:05:32 crc kubenswrapper[5014]: I1205 11:05:32.937587 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4ddf8f910e52a088784fd2d469973cf4512542c6f65d5608f61ef0af3d2944f1"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 11:05:32 crc kubenswrapper[5014]: I1205 11:05:32.937638 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://4ddf8f910e52a088784fd2d469973cf4512542c6f65d5608f61ef0af3d2944f1" gracePeriod=600
Dec 05 11:05:34 crc kubenswrapper[5014]: I1205 11:05:34.755702 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b9d89"
Dec 05 11:05:34 crc kubenswrapper[5014]: I1205 11:05:34.943193 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-t8lzs"
Dec 05 11:05:36 crc kubenswrapper[5014]: I1205 11:05:36.373735 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="4ddf8f910e52a088784fd2d469973cf4512542c6f65d5608f61ef0af3d2944f1" exitCode=0
Dec 05 11:05:36 crc kubenswrapper[5014]: I1205 11:05:36.373791 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"4ddf8f910e52a088784fd2d469973cf4512542c6f65d5608f61ef0af3d2944f1"}
Dec 05 11:05:36 crc kubenswrapper[5014]: I1205 11:05:36.373833 5014 scope.go:117] "RemoveContainer" containerID="38dc139c6b157093aa0187abc2a47c8fff469ab971f15976ee0dbc61fa5a9ede"
Dec 05 11:05:36 crc kubenswrapper[5014]: I1205 11:05:36.497002 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lsnnj"
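The machine-config-daemon records above trace the kubelet's liveness-failure path end to end: a probe failure (connection refused on 127.0.0.1:8798), the "failed liveness probe, will be restarted" message, a kill with gracePeriod=600, the PLEG ContainerDied event with exitCode=0, and RemoveContainer for the prior instance, followed just below by ContainerStarted for the replacement. A rough sketch for pulling such sequences out of a journal dump like this one (the regexes are tailored to the exact fields printed above; an assumption, not a general parser):

import re

# The three phases visible above: probe failure, kill, and PLEG lifecycle events.
PATTERNS = {
    "probe_failed": re.compile(r'"Probe failed" probeType="Liveness" pod="([^"]+)"'),
    "killing":      re.compile(r'"Killing container with a grace period" pod="([^"]+)".*gracePeriod=(\d+)'),
    "pleg":         re.compile(r'"SyncLoop \(PLEG\): event for pod" pod="([^"]+)" event={"ID":"[^"]+","Type":"(Container\w+)"'),
}

def scan(journal_text):
    """Yield (phase, match-groups) tuples in log order, one journal record per line."""
    for line in journal_text.splitlines():
        for phase, pat in PATTERNS.items():
            m = pat.search(line)
            if m:
                yield phase, m.groups()

Fed the surrounding records, this yields the probe_failed, killing, ContainerDied, ContainerStarted ordering for machine-config-daemon-cvtv5.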
Dec 05 11:05:38 crc kubenswrapper[5014]: I1205 11:05:38.399403 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"e69f8ff3539ebba47e81dc1689f38b27a404e4706e334acbe1fa267156045c14"}
Dec 05 11:05:41 crc kubenswrapper[5014]: I1205 11:05:41.323003 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 11:05:42 crc kubenswrapper[5014]: I1205 11:05:42.444090 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" event={"ID":"fce76d80-94e7-4c38-93c0-044691915f03","Type":"ContainerStarted","Data":"05c5ba7f897cde4942869bb82a2ef63e2bf54b2a2b13752009e48fc0c03e02dd"}
Dec 05 11:05:42 crc kubenswrapper[5014]: I1205 11:05:42.469785 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hppvs" podStartSLOduration=2.420036778 podStartE2EDuration="1m7.469612534s" podCreationTimestamp="2025-12-05 11:04:35 +0000 UTC" firstStartedPulling="2025-12-05 11:04:36.778961749 +0000 UTC m=+1003.727079453" lastFinishedPulling="2025-12-05 11:05:41.828537495 +0000 UTC m=+1068.776655209" observedRunningTime="2025-12-05 11:05:42.466719573 +0000 UTC m=+1069.414837297" watchObservedRunningTime="2025-12-05 11:05:42.469612534 +0000 UTC m=+1069.417730248"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.090025 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4s9jk"]
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.091708 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.094862 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.095130 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-l898m"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.097128 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.097251 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.112962 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4s9jk"]
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.175721 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9wv8"]
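From here the log shifts to pod creation in the openstack namespace, and the records that follow repeat one bring-up pattern per pod: SyncLoop ADD from the API, "No sandbox for pod can be found" (a fresh pod rather than a restart), reflector caches populated for the ConfigMaps and Secrets the pod mounts, the volume reconciler's VerifyControllerAttachedVolume, MountVolume started, and MountVolume.SetUp succeeded steps for each volume, and finally a PLEG ContainerStarted event once the sandbox is up. A small sketch that groups those stages per pod, assuming one journal record per line as above (a hypothetical helper, not kubelet code; quotes inside kubelet messages appear backslash-escaped in the journal):

import re
from collections import defaultdict

STAGES = [
    ("added",   re.compile(r'"SyncLoop ADD" source="api" pods=\["([^"]+)"\]')),
    ("sandbox", re.compile(r'Need to start a new one" pod="([^"]+)"')),
    ("mounted", re.compile(r'"MountVolume\.SetUp succeeded.*pod="([^"]+)"')),
    ("started", re.compile(r'event for pod" pod="([^"]+)" event=.*ContainerStarted')),
]

def lifecycle(journal_text):
    """Map pod name -> bring-up stages in the order they were logged."""
    seen = defaultdict(list)
    for line in journal_text.splitlines():
        for stage, pat in STAGES:
            m = pat.search(line)
            if m:
                seen[m.group(1)].append(stage)
    return dict(seen)

For openstack/dnsmasq-dns-78dd6ddcc-v9wv8 the records below produce added, sandbox, then one mounted entry per volume (config, dns-svc, kube-api-access-ght2h), then started.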
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.217282 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.217169 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9wv8"]
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.220301 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.289540 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x994q\" (UniqueName: \"kubernetes.io/projected/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-kube-api-access-x994q\") pod \"dnsmasq-dns-675f4bcbfc-4s9jk\" (UID: \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.289593 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-config\") pod \"dnsmasq-dns-675f4bcbfc-4s9jk\" (UID: \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.392402 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-config\") pod \"dnsmasq-dns-78dd6ddcc-v9wv8\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.392474 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-v9wv8\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.392505 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x994q\" (UniqueName: \"kubernetes.io/projected/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-kube-api-access-x994q\") pod \"dnsmasq-dns-675f4bcbfc-4s9jk\" (UID: \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.392532 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-config\") pod \"dnsmasq-dns-675f4bcbfc-4s9jk\" (UID: \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.392620 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ght2h\" (UniqueName: \"kubernetes.io/projected/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-kube-api-access-ght2h\") pod \"dnsmasq-dns-78dd6ddcc-v9wv8\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.393975 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-config\") pod \"dnsmasq-dns-675f4bcbfc-4s9jk\" (UID: \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.412423 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x994q\" (UniqueName: \"kubernetes.io/projected/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-kube-api-access-x994q\") pod \"dnsmasq-dns-675f4bcbfc-4s9jk\" (UID: \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.414610 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.494034 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-config\") pod \"dnsmasq-dns-78dd6ddcc-v9wv8\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.495059 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-config\") pod \"dnsmasq-dns-78dd6ddcc-v9wv8\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.495186 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-v9wv8\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.497838 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ght2h\" (UniqueName: \"kubernetes.io/projected/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-kube-api-access-ght2h\") pod \"dnsmasq-dns-78dd6ddcc-v9wv8\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.498691 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-v9wv8\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.526695 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ght2h\" (UniqueName: \"kubernetes.io/projected/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-kube-api-access-ght2h\") pod \"dnsmasq-dns-78dd6ddcc-v9wv8\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
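Just below, cAdvisor (manager.go) twice logs "Failed to process watch event ... Status 404": it notices the new crio-<id> cgroup before the runtime has finished registering the container, a benign race during sandbox creation, and the same IDs reappear moments later in ContainerStarted events. The cgroup path in those warnings encodes the QoS class, the pod UID (with underscores standing in for dashes), and the sandbox/container ID; a sketch of recovering them, assuming the kubepods-<qos>.slice layout printed in this log:

import re

# Matches paths like the ones in the warnings below:
# /kubepods.slice/kubepods-besteffort.slice/
#   kubepods-besteffort-podd9b53e21_61a6_4799_bfca_c01b3fd1a8a8.slice/crio-0ead8cc...
CGROUP = re.compile(r"kubepods-(?P<qos>\w+)-pod(?P<uid>[0-9a-f_]+)\.slice/crio-(?P<cid>[0-9a-f]+)")

def parse_cgroup(path):
    m = CGROUP.search(path)
    if not m:
        return None
    return {
        "qos": m.group("qos"),
        "pod_uid": m.group("uid").replace("_", "-"),  # restore the dashes of the API UID
        "container_id": m.group("cid"),               # here, the pod sandbox ID
    }

print(parse_cgroup(
    "/kubepods.slice/kubepods-besteffort.slice/"
    "kubepods-besteffort-podd9b53e21_61a6_4799_bfca_c01b3fd1a8a8.slice/"
    "crio-0ead8cc15be0b79541e00db50a89b3a2ab6fd05e9a8d648c7c2ed6d7e36a5aec"
))
# -> qos 'besteffort', pod_uid 'd9b53e21-61a6-4799-bfca-c01b3fd1a8a8', matching the
#    dnsmasq-dns-675f4bcbfc-4s9jk UID and sandbox ID in the surrounding records.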
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.537037 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:05:57 crc kubenswrapper[5014]: I1205 11:05:57.827324 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4s9jk"]
Dec 05 11:05:57 crc kubenswrapper[5014]: W1205 11:05:57.833238 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9b53e21_61a6_4799_bfca_c01b3fd1a8a8.slice/crio-0ead8cc15be0b79541e00db50a89b3a2ab6fd05e9a8d648c7c2ed6d7e36a5aec WatchSource:0}: Error finding container 0ead8cc15be0b79541e00db50a89b3a2ab6fd05e9a8d648c7c2ed6d7e36a5aec: Status 404 returned error can't find the container with id 0ead8cc15be0b79541e00db50a89b3a2ab6fd05e9a8d648c7c2ed6d7e36a5aec
Dec 05 11:05:58 crc kubenswrapper[5014]: W1205 11:05:58.086135 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89a9e45a_2b93_43f6_9712_b7c2782ae5ad.slice/crio-33f20504d7bc65c0002ec4d6492eae8f08567a67627cd8901bcbcf0ecbdaf7fa WatchSource:0}: Error finding container 33f20504d7bc65c0002ec4d6492eae8f08567a67627cd8901bcbcf0ecbdaf7fa: Status 404 returned error can't find the container with id 33f20504d7bc65c0002ec4d6492eae8f08567a67627cd8901bcbcf0ecbdaf7fa
Dec 05 11:05:58 crc kubenswrapper[5014]: I1205 11:05:58.089755 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9wv8"]
Dec 05 11:05:58 crc kubenswrapper[5014]: I1205 11:05:58.571470 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8" event={"ID":"89a9e45a-2b93-43f6-9712-b7c2782ae5ad","Type":"ContainerStarted","Data":"33f20504d7bc65c0002ec4d6492eae8f08567a67627cd8901bcbcf0ecbdaf7fa"}
Dec 05 11:05:58 crc kubenswrapper[5014]: I1205 11:05:58.572585 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk" event={"ID":"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8","Type":"ContainerStarted","Data":"0ead8cc15be0b79541e00db50a89b3a2ab6fd05e9a8d648c7c2ed6d7e36a5aec"}
Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.142501 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4s9jk"]
Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.190948 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c68wl"]
Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.193319 5014 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.200787 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c68wl"] Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.245735 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-config\") pod \"dnsmasq-dns-666b6646f7-c68wl\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.245869 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcqvn\" (UniqueName: \"kubernetes.io/projected/d669315d-fcb3-4d56-a85a-532a432c957d-kube-api-access-gcqvn\") pod \"dnsmasq-dns-666b6646f7-c68wl\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.245911 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-dns-svc\") pod \"dnsmasq-dns-666b6646f7-c68wl\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.347060 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-dns-svc\") pod \"dnsmasq-dns-666b6646f7-c68wl\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.347147 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-config\") pod \"dnsmasq-dns-666b6646f7-c68wl\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.347203 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcqvn\" (UniqueName: \"kubernetes.io/projected/d669315d-fcb3-4d56-a85a-532a432c957d-kube-api-access-gcqvn\") pod \"dnsmasq-dns-666b6646f7-c68wl\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.347921 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-dns-svc\") pod \"dnsmasq-dns-666b6646f7-c68wl\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.348186 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-config\") pod \"dnsmasq-dns-666b6646f7-c68wl\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.371727 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcqvn\" (UniqueName: 
\"kubernetes.io/projected/d669315d-fcb3-4d56-a85a-532a432c957d-kube-api-access-gcqvn\") pod \"dnsmasq-dns-666b6646f7-c68wl\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.488303 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9wv8"] Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.518751 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-6dh7d"] Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.521309 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.533918 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.540377 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-6dh7d"] Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.551793 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-config\") pod \"dnsmasq-dns-57d769cc4f-6dh7d\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.551910 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rssxf\" (UniqueName: \"kubernetes.io/projected/c773db78-f079-4d0b-8909-124b9e1d15bf-kube-api-access-rssxf\") pod \"dnsmasq-dns-57d769cc4f-6dh7d\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.551994 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-6dh7d\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.656318 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rssxf\" (UniqueName: \"kubernetes.io/projected/c773db78-f079-4d0b-8909-124b9e1d15bf-kube-api-access-rssxf\") pod \"dnsmasq-dns-57d769cc4f-6dh7d\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.656492 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-6dh7d\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.656574 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-config\") pod \"dnsmasq-dns-57d769cc4f-6dh7d\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.657561 5014 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-6dh7d\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.657727 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-config\") pod \"dnsmasq-dns-57d769cc4f-6dh7d\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.698309 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rssxf\" (UniqueName: \"kubernetes.io/projected/c773db78-f079-4d0b-8909-124b9e1d15bf-kube-api-access-rssxf\") pod \"dnsmasq-dns-57d769cc4f-6dh7d\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:00 crc kubenswrapper[5014]: I1205 11:06:00.853738 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:01 crc kubenswrapper[5014]: W1205 11:06:01.179821 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd669315d_fcb3_4d56_a85a_532a432c957d.slice/crio-b9fa4cf5aa9bc420bcee16cab232d750fa4066cfa62bc04afc7c63ef8b0d51e9 WatchSource:0}: Error finding container b9fa4cf5aa9bc420bcee16cab232d750fa4066cfa62bc04afc7c63ef8b0d51e9: Status 404 returned error can't find the container with id b9fa4cf5aa9bc420bcee16cab232d750fa4066cfa62bc04afc7c63ef8b0d51e9 Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.183076 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c68wl"] Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.344916 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.346978 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.350136 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.358392 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.358984 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.359427 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.360919 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.361125 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.362213 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-dtjpt" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.370660 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.388040 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-config-data\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.388119 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/909c5067-f4b6-4303-98e0-7f0763da52f9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.388161 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.388225 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.389317 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.389473 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.389517 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.389665 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.391456 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.391548 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq84j\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-kube-api-access-wq84j\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.391663 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/909c5067-f4b6-4303-98e0-7f0763da52f9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.412440 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-6dh7d"] Dec 05 11:06:01 crc kubenswrapper[5014]: W1205 11:06:01.436426 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773db78_f079_4d0b_8909_124b9e1d15bf.slice/crio-ad85a607cd9b9cbf2efd5a3ea0a6f3edac032081adaf8ede37aadf9cced7f9b9 WatchSource:0}: Error finding container ad85a607cd9b9cbf2efd5a3ea0a6f3edac032081adaf8ede37aadf9cced7f9b9: Status 404 returned error can't find the container with id ad85a607cd9b9cbf2efd5a3ea0a6f3edac032081adaf8ede37aadf9cced7f9b9 Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.493594 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.493678 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " 
pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.493717 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.493745 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.494224 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.494309 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.494348 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq84j\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-kube-api-access-wq84j\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.494393 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/909c5067-f4b6-4303-98e0-7f0763da52f9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.494434 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-config-data\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.494455 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/909c5067-f4b6-4303-98e0-7f0763da52f9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.494484 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.495537 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.495943 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.496574 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-config-data\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.496633 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.496922 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.496947 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.502493 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/909c5067-f4b6-4303-98e0-7f0763da52f9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.504520 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.506338 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/909c5067-f4b6-4303-98e0-7f0763da52f9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.515138 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq84j\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-kube-api-access-wq84j\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.515312 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.529159 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.650028 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-c68wl" event={"ID":"d669315d-fcb3-4d56-a85a-532a432c957d","Type":"ContainerStarted","Data":"b9fa4cf5aa9bc420bcee16cab232d750fa4066cfa62bc04afc7c63ef8b0d51e9"} Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.651255 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" event={"ID":"c773db78-f079-4d0b-8909-124b9e1d15bf","Type":"ContainerStarted","Data":"ad85a607cd9b9cbf2efd5a3ea0a6f3edac032081adaf8ede37aadf9cced7f9b9"} Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.683486 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.685357 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.687964 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.688198 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.689510 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-gvntn" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.693949 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.693981 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.694292 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.697551 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.714194 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.717589 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.802320 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.802383 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72dqf\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-kube-api-access-72dqf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.802409 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.802446 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.802468 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.802515 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.802547 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.802575 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.802617 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.804065 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.804130 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909118 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909189 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909213 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909245 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909287 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72dqf\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-kube-api-access-72dqf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909313 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909355 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: 
I1205 11:06:01.909382 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909429 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909456 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.909483 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.910250 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.911043 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.912589 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.912999 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.915447 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.916831 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.917233 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.918544 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.919820 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.928264 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.933990 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72dqf\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-kube-api-access-72dqf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:01 crc kubenswrapper[5014]: I1205 11:06:01.982323 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:02 crc kubenswrapper[5014]: I1205 11:06:02.033627 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:06:02 crc kubenswrapper[5014]: I1205 11:06:02.255869 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 11:06:02 crc kubenswrapper[5014]: W1205 11:06:02.270951 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod909c5067_f4b6_4303_98e0_7f0763da52f9.slice/crio-7c186f81c044a96e47cf1dc31c175f356cc731a3c030c8d6b3d929633cad5587 WatchSource:0}: Error finding container 7c186f81c044a96e47cf1dc31c175f356cc731a3c030c8d6b3d929633cad5587: Status 404 returned error can't find the container with id 7c186f81c044a96e47cf1dc31c175f356cc731a3c030c8d6b3d929633cad5587 Dec 05 11:06:02 crc kubenswrapper[5014]: I1205 11:06:02.522044 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:06:02 crc kubenswrapper[5014]: W1205 11:06:02.525599 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e9300c9_3a44_43c1_bbe7_d0959a35eee1.slice/crio-a116254899486103a8b79f5c5d89c8079b39ea20e5d37007390d55eb7ea00076 WatchSource:0}: Error finding container a116254899486103a8b79f5c5d89c8079b39ea20e5d37007390d55eb7ea00076: Status 404 returned error can't find the container with id a116254899486103a8b79f5c5d89c8079b39ea20e5d37007390d55eb7ea00076 Dec 05 11:06:02 crc kubenswrapper[5014]: I1205 11:06:02.675904 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"909c5067-f4b6-4303-98e0-7f0763da52f9","Type":"ContainerStarted","Data":"7c186f81c044a96e47cf1dc31c175f356cc731a3c030c8d6b3d929633cad5587"} Dec 05 11:06:02 crc kubenswrapper[5014]: I1205 11:06:02.678836 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9e9300c9-3a44-43c1-bbe7-d0959a35eee1","Type":"ContainerStarted","Data":"a116254899486103a8b79f5c5d89c8079b39ea20e5d37007390d55eb7ea00076"} Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.004121 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.008522 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.015750 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.016376 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-zfjkk" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.016550 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.017255 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.028288 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.034736 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.136509 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.136566 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2h76\" (UniqueName: \"kubernetes.io/projected/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-kube-api-access-l2h76\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.136607 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.136644 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.136686 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.136747 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-config-data-default\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.136802 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.136845 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-kolla-config\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.237906 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.237976 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-kolla-config\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.238017 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.238032 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2h76\" (UniqueName: \"kubernetes.io/projected/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-kube-api-access-l2h76\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.238058 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.238086 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.238114 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.238132 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-config-data-default\") pod \"openstack-galera-0\" (UID: 
\"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.239094 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-kolla-config\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.239137 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-config-data-default\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.239226 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.240359 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.241489 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.254118 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.257898 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.261794 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2h76\" (UniqueName: \"kubernetes.io/projected/8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa-kube-api-access-l2h76\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.276540 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa\") " pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.359843 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 11:06:03 crc kubenswrapper[5014]: I1205 11:06:03.919494 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 11:06:03 crc kubenswrapper[5014]: W1205 11:06:03.928140 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8dfd2d83_1267_4a23_a21a_e7f0d41ec0fa.slice/crio-091644fd9d12795b5cb084aec62ea8d0c6eb3f3946cde2c529266d3746513826 WatchSource:0}: Error finding container 091644fd9d12795b5cb084aec62ea8d0c6eb3f3946cde2c529266d3746513826: Status 404 returned error can't find the container with id 091644fd9d12795b5cb084aec62ea8d0c6eb3f3946cde2c529266d3746513826 Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.448567 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.450531 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.453093 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.453151 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.453098 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.459867 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-hn4kt" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.465400 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.572948 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.573014 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxsk2\" (UniqueName: \"kubernetes.io/projected/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-kube-api-access-sxsk2\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.573095 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.573130 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " 
pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.573153 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.573183 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.573385 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.573508 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.618629 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.620831 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.628123 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-7pbmp" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.629044 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.629288 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.630100 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.682560 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.682633 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxsk2\" (UniqueName: \"kubernetes.io/projected/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-kube-api-access-sxsk2\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.682691 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.682714 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.682741 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.682766 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.682824 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.682872 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" 
(UniqueName: \"kubernetes.io/empty-dir/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.683488 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.684858 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.685912 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.685916 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.687020 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.746525 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.767328 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.768363 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa","Type":"ContainerStarted","Data":"091644fd9d12795b5cb084aec62ea8d0c6eb3f3946cde2c529266d3746513826"} Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.780163 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxsk2\" (UniqueName: \"kubernetes.io/projected/021926d7-f03a-4b1b-bcf3-bdd000b17a1e-kube-api-access-sxsk2\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " 
pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.782284 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.786438 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec058eab-d721-4033-b346-bddf43d1de29-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.786544 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec058eab-d721-4033-b346-bddf43d1de29-config-data\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.786564 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v66vc\" (UniqueName: \"kubernetes.io/projected/ec058eab-d721-4033-b346-bddf43d1de29-kube-api-access-v66vc\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.786676 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.786698 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ec058eab-d721-4033-b346-bddf43d1de29-kolla-config\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.786776 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec058eab-d721-4033-b346-bddf43d1de29-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.787054 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"021926d7-f03a-4b1b-bcf3-bdd000b17a1e\") " pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.824376 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.888101 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec058eab-d721-4033-b346-bddf43d1de29-config-data\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.888152 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v66vc\" (UniqueName: \"kubernetes.io/projected/ec058eab-d721-4033-b346-bddf43d1de29-kube-api-access-v66vc\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.888202 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ec058eab-d721-4033-b346-bddf43d1de29-kolla-config\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.888244 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec058eab-d721-4033-b346-bddf43d1de29-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.888346 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec058eab-d721-4033-b346-bddf43d1de29-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.902007 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec058eab-d721-4033-b346-bddf43d1de29-config-data\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.904922 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ec058eab-d721-4033-b346-bddf43d1de29-kolla-config\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.921721 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec058eab-d721-4033-b346-bddf43d1de29-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.933785 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec058eab-d721-4033-b346-bddf43d1de29-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:04 crc kubenswrapper[5014]: I1205 11:06:04.949473 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v66vc\" (UniqueName: \"kubernetes.io/projected/ec058eab-d721-4033-b346-bddf43d1de29-kube-api-access-v66vc\") pod \"memcached-0\" (UID: 
\"ec058eab-d721-4033-b346-bddf43d1de29\") " pod="openstack/memcached-0" Dec 05 11:06:05 crc kubenswrapper[5014]: I1205 11:06:05.000112 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 11:06:06 crc kubenswrapper[5014]: I1205 11:06:06.704775 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:06:06 crc kubenswrapper[5014]: I1205 11:06:06.705980 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 11:06:06 crc kubenswrapper[5014]: I1205 11:06:06.709449 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-jkmxr" Dec 05 11:06:06 crc kubenswrapper[5014]: I1205 11:06:06.721834 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:06:06 crc kubenswrapper[5014]: I1205 11:06:06.735995 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcvxl\" (UniqueName: \"kubernetes.io/projected/53c31740-5b9c-402e-bccb-929d64de7669-kube-api-access-qcvxl\") pod \"kube-state-metrics-0\" (UID: \"53c31740-5b9c-402e-bccb-929d64de7669\") " pod="openstack/kube-state-metrics-0" Dec 05 11:06:06 crc kubenswrapper[5014]: I1205 11:06:06.839176 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcvxl\" (UniqueName: \"kubernetes.io/projected/53c31740-5b9c-402e-bccb-929d64de7669-kube-api-access-qcvxl\") pod \"kube-state-metrics-0\" (UID: \"53c31740-5b9c-402e-bccb-929d64de7669\") " pod="openstack/kube-state-metrics-0" Dec 05 11:06:06 crc kubenswrapper[5014]: I1205 11:06:06.878570 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcvxl\" (UniqueName: \"kubernetes.io/projected/53c31740-5b9c-402e-bccb-929d64de7669-kube-api-access-qcvxl\") pod \"kube-state-metrics-0\" (UID: \"53c31740-5b9c-402e-bccb-929d64de7669\") " pod="openstack/kube-state-metrics-0" Dec 05 11:06:07 crc kubenswrapper[5014]: I1205 11:06:07.089476 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.733512 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-6wk9t"] Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.734954 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.744546 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.744815 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.744968 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-ht7gc" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.754345 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-mfsjr"] Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.758168 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.772405 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6wk9t"] Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.781851 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-mfsjr"] Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.823695 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-var-log\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.823757 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-var-run-ovn\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.823786 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-var-log-ovn\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.823819 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-etc-ovs\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.823842 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxtbx\" (UniqueName: \"kubernetes.io/projected/abc53eaa-a216-4ea8-a223-4e2c79562edb-kube-api-access-pxtbx\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.823879 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abc53eaa-a216-4ea8-a223-4e2c79562edb-scripts\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.823898 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-var-run\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.823917 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-var-lib\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 
11:06:09.824007 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-scripts\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.824028 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-var-run\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.824069 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-combined-ca-bundle\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.824096 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5vdd\" (UniqueName: \"kubernetes.io/projected/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-kube-api-access-d5vdd\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.824113 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-ovn-controller-tls-certs\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.925924 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-var-run-ovn\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.925974 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-var-log-ovn\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.926003 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-etc-ovs\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.926033 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxtbx\" (UniqueName: \"kubernetes.io/projected/abc53eaa-a216-4ea8-a223-4e2c79562edb-kube-api-access-pxtbx\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.926100 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abc53eaa-a216-4ea8-a223-4e2c79562edb-scripts\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.926120 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-var-run\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.926899 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-var-lib\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.926643 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-etc-ovs\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.926643 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-var-run\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.926773 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-var-run-ovn\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.926586 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-var-log-ovn\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.927101 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-var-lib\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.927170 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-scripts\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.927322 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-var-run\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.927375 5014 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-combined-ca-bundle\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.927460 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-var-run\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.927510 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-ovn-controller-tls-certs\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.927540 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5vdd\" (UniqueName: \"kubernetes.io/projected/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-kube-api-access-d5vdd\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.927585 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-var-log\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.927746 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/abc53eaa-a216-4ea8-a223-4e2c79562edb-var-log\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.928786 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abc53eaa-a216-4ea8-a223-4e2c79562edb-scripts\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.929946 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-scripts\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.952108 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-ovn-controller-tls-certs\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.956940 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-combined-ca-bundle\") pod \"ovn-controller-6wk9t\" (UID: 
\"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.958175 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxtbx\" (UniqueName: \"kubernetes.io/projected/abc53eaa-a216-4ea8-a223-4e2c79562edb-kube-api-access-pxtbx\") pod \"ovn-controller-ovs-mfsjr\" (UID: \"abc53eaa-a216-4ea8-a223-4e2c79562edb\") " pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:09 crc kubenswrapper[5014]: I1205 11:06:09.984061 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5vdd\" (UniqueName: \"kubernetes.io/projected/2b673e96-d37f-49d8-b3f2-c72cd66ab6db-kube-api-access-d5vdd\") pod \"ovn-controller-6wk9t\" (UID: \"2b673e96-d37f-49d8-b3f2-c72cd66ab6db\") " pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:10 crc kubenswrapper[5014]: I1205 11:06:10.061096 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:10 crc kubenswrapper[5014]: I1205 11:06:10.090601 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.106453 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.108815 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.115574 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.116709 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.116870 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.116972 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-6gk5h" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.117126 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.119816 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.192179 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.192334 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b28650ad-9ebf-471c-91c9-3adef7f85d9f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.192386 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b28650ad-9ebf-471c-91c9-3adef7f85d9f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.192600 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z24mp\" (UniqueName: \"kubernetes.io/projected/b28650ad-9ebf-471c-91c9-3adef7f85d9f-kube-api-access-z24mp\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.192646 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b28650ad-9ebf-471c-91c9-3adef7f85d9f-config\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.192696 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b28650ad-9ebf-471c-91c9-3adef7f85d9f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.192929 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28650ad-9ebf-471c-91c9-3adef7f85d9f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.192972 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28650ad-9ebf-471c-91c9-3adef7f85d9f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.294658 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z24mp\" (UniqueName: \"kubernetes.io/projected/b28650ad-9ebf-471c-91c9-3adef7f85d9f-kube-api-access-z24mp\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.294715 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b28650ad-9ebf-471c-91c9-3adef7f85d9f-config\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.294752 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b28650ad-9ebf-471c-91c9-3adef7f85d9f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.294841 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28650ad-9ebf-471c-91c9-3adef7f85d9f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " 
pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.294881 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28650ad-9ebf-471c-91c9-3adef7f85d9f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.294915 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.294959 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b28650ad-9ebf-471c-91c9-3adef7f85d9f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.295001 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28650ad-9ebf-471c-91c9-3adef7f85d9f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.296176 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.296373 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b28650ad-9ebf-471c-91c9-3adef7f85d9f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.301006 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b28650ad-9ebf-471c-91c9-3adef7f85d9f-config\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.301876 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28650ad-9ebf-471c-91c9-3adef7f85d9f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.301897 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28650ad-9ebf-471c-91c9-3adef7f85d9f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.307603 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b28650ad-9ebf-471c-91c9-3adef7f85d9f-scripts\") pod \"ovsdbserver-sb-0\" 
(UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.317828 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28650ad-9ebf-471c-91c9-3adef7f85d9f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.318741 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z24mp\" (UniqueName: \"kubernetes.io/projected/b28650ad-9ebf-471c-91c9-3adef7f85d9f-kube-api-access-z24mp\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.333246 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b28650ad-9ebf-471c-91c9-3adef7f85d9f\") " pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:12 crc kubenswrapper[5014]: I1205 11:06:12.441559 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.380063 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.381881 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.384594 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.384691 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.384851 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-zjqlg" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.385048 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.392543 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.440110 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzxs9\" (UniqueName: \"kubernetes.io/projected/af56d79f-8f8a-4710-96a9-7995c0a30467-kube-api-access-dzxs9\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.440155 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af56d79f-8f8a-4710-96a9-7995c0a30467-config\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.440194 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/af56d79f-8f8a-4710-96a9-7995c0a30467-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.440248 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/af56d79f-8f8a-4710-96a9-7995c0a30467-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.440330 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.440360 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af56d79f-8f8a-4710-96a9-7995c0a30467-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.440375 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af56d79f-8f8a-4710-96a9-7995c0a30467-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.440434 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/af56d79f-8f8a-4710-96a9-7995c0a30467-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.542125 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af56d79f-8f8a-4710-96a9-7995c0a30467-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.542177 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af56d79f-8f8a-4710-96a9-7995c0a30467-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.542240 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/af56d79f-8f8a-4710-96a9-7995c0a30467-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.542286 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzxs9\" (UniqueName: \"kubernetes.io/projected/af56d79f-8f8a-4710-96a9-7995c0a30467-kube-api-access-dzxs9\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " 
pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.542312 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af56d79f-8f8a-4710-96a9-7995c0a30467-config\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.542350 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/af56d79f-8f8a-4710-96a9-7995c0a30467-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.542400 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/af56d79f-8f8a-4710-96a9-7995c0a30467-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.542452 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.542822 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.543359 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af56d79f-8f8a-4710-96a9-7995c0a30467-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.544816 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af56d79f-8f8a-4710-96a9-7995c0a30467-config\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.556451 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/af56d79f-8f8a-4710-96a9-7995c0a30467-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.572407 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af56d79f-8f8a-4710-96a9-7995c0a30467-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.586108 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzxs9\" (UniqueName: \"kubernetes.io/projected/af56d79f-8f8a-4710-96a9-7995c0a30467-kube-api-access-dzxs9\") pod \"ovsdbserver-nb-0\" (UID: 
\"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.601489 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.606057 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/af56d79f-8f8a-4710-96a9-7995c0a30467-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.668908 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/af56d79f-8f8a-4710-96a9-7995c0a30467-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"af56d79f-8f8a-4710-96a9-7995c0a30467\") " pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:14 crc kubenswrapper[5014]: I1205 11:06:14.735087 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:29 crc kubenswrapper[5014]: E1205 11:06:29.481908 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 11:06:29 crc kubenswrapper[5014]: E1205 11:06:29.483161 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-72dqf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(9e9300c9-3a44-43c1-bbe7-d0959a35eee1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:06:29 crc kubenswrapper[5014]: E1205 11:06:29.489571 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" Dec 05 11:06:29 crc kubenswrapper[5014]: E1205 11:06:29.550347 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 11:06:29 crc kubenswrapper[5014]: E1205 11:06:29.550575 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wq84j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(909c5067-f4b6-4303-98e0-7f0763da52f9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:06:29 crc kubenswrapper[5014]: E1205 11:06:29.551819 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" Dec 05 11:06:30 crc kubenswrapper[5014]: E1205 11:06:30.069351 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" Dec 05 11:06:30 crc kubenswrapper[5014]: E1205 11:06:30.070254 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" Dec 05 11:06:30 crc kubenswrapper[5014]: E1205 11:06:30.999771 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 11:06:31 crc kubenswrapper[5014]: E1205 11:06:31.000017 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
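[editor's note] Each failed pull above surfaces twice: an ErrImagePull record when the CRI pull RPC is canceled, then ImagePullBackOff records while the kubelet waits out its retry backoff before pulling again. Below is a minimal Go sketch of the doubling-with-cap backoff pattern; the 10s initial delay and 5m cap are assumptions based on kubelet's commonly documented image-pull defaults, and nextBackoff is a hypothetical helper, not the kubelet's actual implementation.

package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the delay after every consecutive failure and
// caps it, matching the familiar kubelet "Back-off pulling image"
// pattern (initial/cap values here are assumed defaults).
func nextBackoff(cur time.Duration) time.Duration {
	const (
		initial  = 10 * time.Second
		maxDelay = 5 * time.Minute
	)
	if cur == 0 {
		return initial
	}
	if cur*2 > maxDelay {
		return maxDelay
	}
	return cur * 2
}

func main() {
	// A pull that keeps failing is retried at 10s, 20s, 40s, ... up to 5m;
	// between attempts the pod reports ImagePullBackOff, as in the log.
	var d time.Duration
	for i := 0; i < 7; i++ {
		d = nextBackoff(d)
		fmt.Printf("attempt %d failed: ImagePullBackOff, retry in %v\n", i+1, d)
	}
}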
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x994q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-4s9jk_openstack(d9b53e21-61a6-4799-bfca-c01b3fd1a8a8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:06:31 crc kubenswrapper[5014]: E1205 11:06:31.002010 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk" podUID="d9b53e21-61a6-4799-bfca-c01b3fd1a8a8" Dec 05 11:06:31 crc kubenswrapper[5014]: E1205 11:06:31.013432 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 11:06:31 crc kubenswrapper[5014]: E1205 11:06:31.013630 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ght2h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-v9wv8_openstack(89a9e45a-2b93-43f6-9712-b7c2782ae5ad): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:06:31 crc kubenswrapper[5014]: E1205 11:06:31.014833 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8" podUID="89a9e45a-2b93-43f6-9712-b7c2782ae5ad" Dec 05 11:06:31 crc kubenswrapper[5014]: E1205 11:06:31.041549 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 11:06:31 crc kubenswrapper[5014]: E1205 11:06:31.041773 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rssxf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-6dh7d_openstack(c773db78-f079-4d0b-8909-124b9e1d15bf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:06:31 crc kubenswrapper[5014]: E1205 11:06:31.043052 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" podUID="c773db78-f079-4d0b-8909-124b9e1d15bf" Dec 05 11:06:31 crc kubenswrapper[5014]: E1205 11:06:31.075712 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" podUID="c773db78-f079-4d0b-8909-124b9e1d15bf" Dec 05 11:06:32 crc kubenswrapper[5014]: E1205 11:06:32.880825 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 11:06:32 crc kubenswrapper[5014]: E1205 11:06:32.881702 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l2h76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:06:32 crc kubenswrapper[5014]: E1205 11:06:32.882958 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa" Dec 05 11:06:32 crc kubenswrapper[5014]: E1205 11:06:32.918115 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 11:06:32 crc kubenswrapper[5014]: E1205 11:06:32.918587 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gcqvn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-c68wl_openstack(d669315d-fcb3-4d56-a85a-532a432c957d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:06:32 crc kubenswrapper[5014]: E1205 11:06:32.919817 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-c68wl" podUID="d669315d-fcb3-4d56-a85a-532a432c957d" Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.017871 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk" Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.028625 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8" Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.095950 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-4s9jk" event={"ID":"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8","Type":"ContainerDied","Data":"0ead8cc15be0b79541e00db50a89b3a2ab6fd05e9a8d648c7c2ed6d7e36a5aec"} Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.096007 5014 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.097080 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8" event={"ID":"89a9e45a-2b93-43f6-9712-b7c2782ae5ad","Type":"ContainerDied","Data":"33f20504d7bc65c0002ec4d6492eae8f08567a67627cd8901bcbcf0ecbdaf7fa"}
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.097337 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v9wv8"
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.205789 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-config\") pod \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\" (UID: \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\") "
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.205833 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-config\") pod \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") "
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.205949 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x994q\" (UniqueName: \"kubernetes.io/projected/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-kube-api-access-x994q\") pod \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\" (UID: \"d9b53e21-61a6-4799-bfca-c01b3fd1a8a8\") "
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.205994 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ght2h\" (UniqueName: \"kubernetes.io/projected/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-kube-api-access-ght2h\") pod \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") "
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.206090 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-dns-svc\") pod \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\" (UID: \"89a9e45a-2b93-43f6-9712-b7c2782ae5ad\") "
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.207394 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-config" (OuterVolumeSpecName: "config") pod "d9b53e21-61a6-4799-bfca-c01b3fd1a8a8" (UID: "d9b53e21-61a6-4799-bfca-c01b3fd1a8a8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.207419 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "89a9e45a-2b93-43f6-9712-b7c2782ae5ad" (UID: "89a9e45a-2b93-43f6-9712-b7c2782ae5ad"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:06:33 crc kubenswrapper[5014]: E1205 11:06:33.207618 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-c68wl" podUID="d669315d-fcb3-4d56-a85a-532a432c957d"
Dec 05 11:06:33 crc kubenswrapper[5014]: E1205 11:06:33.207657 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa"
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.208068 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-config" (OuterVolumeSpecName: "config") pod "89a9e45a-2b93-43f6-9712-b7c2782ae5ad" (UID: "89a9e45a-2b93-43f6-9712-b7c2782ae5ad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.225925 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-kube-api-access-ght2h" (OuterVolumeSpecName: "kube-api-access-ght2h") pod "89a9e45a-2b93-43f6-9712-b7c2782ae5ad" (UID: "89a9e45a-2b93-43f6-9712-b7c2782ae5ad"). InnerVolumeSpecName "kube-api-access-ght2h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.276672 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-kube-api-access-x994q" (OuterVolumeSpecName: "kube-api-access-x994q") pod "d9b53e21-61a6-4799-bfca-c01b3fd1a8a8" (UID: "d9b53e21-61a6-4799-bfca-c01b3fd1a8a8"). InnerVolumeSpecName "kube-api-access-x994q". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.313139 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.313175 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.313188 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x994q\" (UniqueName: \"kubernetes.io/projected/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8-kube-api-access-x994q\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.313203 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ght2h\" (UniqueName: \"kubernetes.io/projected/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-kube-api-access-ght2h\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.313217 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89a9e45a-2b93-43f6-9712-b7c2782ae5ad-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.419567 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.461283 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4s9jk"] Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.482680 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4s9jk"] Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.515089 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9wv8"] Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.521681 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9wv8"] Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.792357 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 11:06:33 crc kubenswrapper[5014]: I1205 11:06:33.846255 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6wk9t"] Dec 05 11:06:34 crc kubenswrapper[5014]: I1205 11:06:34.010690 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 11:06:34 crc kubenswrapper[5014]: I1205 11:06:34.086747 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 11:06:34 crc kubenswrapper[5014]: I1205 11:06:34.109868 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"53c31740-5b9c-402e-bccb-929d64de7669","Type":"ContainerStarted","Data":"4d5c7c13fec2bf8f1613e8f2d4d85addfab2294cc72e1a70c94a72c8d0a5f0c0"} Dec 05 11:06:34 crc kubenswrapper[5014]: I1205 11:06:34.111352 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ec058eab-d721-4033-b346-bddf43d1de29","Type":"ContainerStarted","Data":"1dd6cc4f34954904bf55f835fb1b26788236ce2884aaede8baed1b9bd1e3eeb8"} Dec 05 11:06:34 crc kubenswrapper[5014]: I1205 11:06:34.112708 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"b28650ad-9ebf-471c-91c9-3adef7f85d9f","Type":"ContainerStarted","Data":"d122a031482bf5ace4166ed85f9632adf005271e6f138f836142afc180fbc9a7"} Dec 05 11:06:34 crc kubenswrapper[5014]: I1205 11:06:34.113932 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6wk9t" event={"ID":"2b673e96-d37f-49d8-b3f2-c72cd66ab6db","Type":"ContainerStarted","Data":"25d52d4b52e136366da9b8ce98977601d3d15f55c0e1de856bd34be04705d345"} Dec 05 11:06:34 crc kubenswrapper[5014]: I1205 11:06:34.115326 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"021926d7-f03a-4b1b-bcf3-bdd000b17a1e","Type":"ContainerStarted","Data":"6fd8a03e4f966c9a2cc9cc05fe99fa640ad319d8014b208ab3efd224a335ef2c"} Dec 05 11:06:34 crc kubenswrapper[5014]: I1205 11:06:34.649602 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-mfsjr"] Dec 05 11:06:34 crc kubenswrapper[5014]: W1205 11:06:34.722105 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabc53eaa_a216_4ea8_a223_4e2c79562edb.slice/crio-8b0ef1f9b6e0dc11ba8f63111851a8b6de5af05418e6b5192115f1bde637ea92 WatchSource:0}: Error finding container 8b0ef1f9b6e0dc11ba8f63111851a8b6de5af05418e6b5192115f1bde637ea92: Status 404 returned error can't find the container with id 8b0ef1f9b6e0dc11ba8f63111851a8b6de5af05418e6b5192115f1bde637ea92 Dec 05 11:06:35 crc kubenswrapper[5014]: I1205 11:06:35.009416 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 11:06:35 crc kubenswrapper[5014]: I1205 11:06:35.125746 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mfsjr" event={"ID":"abc53eaa-a216-4ea8-a223-4e2c79562edb","Type":"ContainerStarted","Data":"8b0ef1f9b6e0dc11ba8f63111851a8b6de5af05418e6b5192115f1bde637ea92"} Dec 05 11:06:35 crc kubenswrapper[5014]: I1205 11:06:35.127462 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"021926d7-f03a-4b1b-bcf3-bdd000b17a1e","Type":"ContainerStarted","Data":"3bd2de29e6322e1f9349bc47ed2ddfac6833a4589a3cde5b4c4fc91e50d5589a"} Dec 05 11:06:35 crc kubenswrapper[5014]: I1205 11:06:35.328587 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89a9e45a-2b93-43f6-9712-b7c2782ae5ad" path="/var/lib/kubelet/pods/89a9e45a-2b93-43f6-9712-b7c2782ae5ad/volumes" Dec 05 11:06:35 crc kubenswrapper[5014]: I1205 11:06:35.329325 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9b53e21-61a6-4799-bfca-c01b3fd1a8a8" path="/var/lib/kubelet/pods/d9b53e21-61a6-4799-bfca-c01b3fd1a8a8/volumes" Dec 05 11:06:35 crc kubenswrapper[5014]: W1205 11:06:35.756309 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf56d79f_8f8a_4710_96a9_7995c0a30467.slice/crio-521ed2efc90618ca1b0242e4cc8702034b86a9d8dbdf93437e6cfa4e9c148014 WatchSource:0}: Error finding container 521ed2efc90618ca1b0242e4cc8702034b86a9d8dbdf93437e6cfa4e9c148014: Status 404 returned error can't find the container with id 521ed2efc90618ca1b0242e4cc8702034b86a9d8dbdf93437e6cfa4e9c148014 Dec 05 11:06:36 crc kubenswrapper[5014]: I1205 11:06:36.139610 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"af56d79f-8f8a-4710-96a9-7995c0a30467","Type":"ContainerStarted","Data":"521ed2efc90618ca1b0242e4cc8702034b86a9d8dbdf93437e6cfa4e9c148014"} Dec 05 11:06:39 crc kubenswrapper[5014]: I1205 11:06:39.167470 5014 generic.go:334] "Generic (PLEG): container finished" podID="021926d7-f03a-4b1b-bcf3-bdd000b17a1e" containerID="3bd2de29e6322e1f9349bc47ed2ddfac6833a4589a3cde5b4c4fc91e50d5589a" exitCode=0 Dec 05 11:06:39 crc kubenswrapper[5014]: I1205 11:06:39.167546 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"021926d7-f03a-4b1b-bcf3-bdd000b17a1e","Type":"ContainerDied","Data":"3bd2de29e6322e1f9349bc47ed2ddfac6833a4589a3cde5b4c4fc91e50d5589a"} Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.188784 5014 generic.go:334] "Generic (PLEG): container finished" podID="abc53eaa-a216-4ea8-a223-4e2c79562edb" containerID="06b102af039afe2fb16f44c196c5d476783929fc38135d4fe561b321f19a3c86" exitCode=0 Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.188891 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mfsjr" event={"ID":"abc53eaa-a216-4ea8-a223-4e2c79562edb","Type":"ContainerDied","Data":"06b102af039afe2fb16f44c196c5d476783929fc38135d4fe561b321f19a3c86"} Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.196298 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"021926d7-f03a-4b1b-bcf3-bdd000b17a1e","Type":"ContainerStarted","Data":"1212b9dcef33bf22fb68dd27c091e227972ce00ecca48e0821421e9472b04f81"} Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.200889 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"53c31740-5b9c-402e-bccb-929d64de7669","Type":"ContainerStarted","Data":"fa5598192edba6c6f74c8dec204a252c425ede2d7006b6d170640d7400d6273d"} Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.201200 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.210113 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ec058eab-d721-4033-b346-bddf43d1de29","Type":"ContainerStarted","Data":"0c6093d2fa5a4a780147acb7847fe68eb92f13c907b124619b91e875457d597b"} Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.211260 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.218416 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b28650ad-9ebf-471c-91c9-3adef7f85d9f","Type":"ContainerStarted","Data":"cc65db891e7c02c5a50fc780b44932d08c00d1f62c19b11a1d4acd58a07cef76"} Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.222917 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"af56d79f-8f8a-4710-96a9-7995c0a30467","Type":"ContainerStarted","Data":"520398d0ae7c668e37d62ac1f2694b9c90c0c624448bab70a2cccc5082dc0eb5"} Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.225790 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6wk9t" event={"ID":"2b673e96-d37f-49d8-b3f2-c72cd66ab6db","Type":"ContainerStarted","Data":"9feb2ea88ccb2d5043923c7cc5069afcc3774094981ed4edfee3b580e53f7784"} Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.226015 5014 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/ovn-controller-6wk9t" Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.240460 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=37.542440186 podStartE2EDuration="38.240432989s" podCreationTimestamp="2025-12-05 11:06:03 +0000 UTC" firstStartedPulling="2025-12-05 11:06:33.434492735 +0000 UTC m=+1120.382610439" lastFinishedPulling="2025-12-05 11:06:34.132485538 +0000 UTC m=+1121.080603242" observedRunningTime="2025-12-05 11:06:41.235402857 +0000 UTC m=+1128.183520561" watchObservedRunningTime="2025-12-05 11:06:41.240432989 +0000 UTC m=+1128.188550683" Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.253930 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=31.773752633 podStartE2EDuration="37.253909987s" podCreationTimestamp="2025-12-05 11:06:04 +0000 UTC" firstStartedPulling="2025-12-05 11:06:34.022738527 +0000 UTC m=+1120.970856231" lastFinishedPulling="2025-12-05 11:06:39.502895881 +0000 UTC m=+1126.451013585" observedRunningTime="2025-12-05 11:06:41.252997564 +0000 UTC m=+1128.201115278" watchObservedRunningTime="2025-12-05 11:06:41.253909987 +0000 UTC m=+1128.202027691" Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.286759 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=29.043888276 podStartE2EDuration="35.286721472s" podCreationTimestamp="2025-12-05 11:06:06 +0000 UTC" firstStartedPulling="2025-12-05 11:06:33.863192199 +0000 UTC m=+1120.811309903" lastFinishedPulling="2025-12-05 11:06:40.106025395 +0000 UTC m=+1127.054143099" observedRunningTime="2025-12-05 11:06:41.282433708 +0000 UTC m=+1128.230551422" watchObservedRunningTime="2025-12-05 11:06:41.286721472 +0000 UTC m=+1128.234839166" Dec 05 11:06:41 crc kubenswrapper[5014]: I1205 11:06:41.303701 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-6wk9t" podStartSLOduration=26.757186391 podStartE2EDuration="32.303682373s" podCreationTimestamp="2025-12-05 11:06:09 +0000 UTC" firstStartedPulling="2025-12-05 11:06:33.823805264 +0000 UTC m=+1120.771922968" lastFinishedPulling="2025-12-05 11:06:39.370301236 +0000 UTC m=+1126.318418950" observedRunningTime="2025-12-05 11:06:41.300385993 +0000 UTC m=+1128.248503697" watchObservedRunningTime="2025-12-05 11:06:41.303682373 +0000 UTC m=+1128.251800077" Dec 05 11:06:42 crc kubenswrapper[5014]: I1205 11:06:42.260646 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mfsjr" event={"ID":"abc53eaa-a216-4ea8-a223-4e2c79562edb","Type":"ContainerStarted","Data":"57aa20d551ce4c9ead43713e3f20d6e4259717975ff93a0993a36d1fdd9f4827"} Dec 05 11:06:42 crc kubenswrapper[5014]: I1205 11:06:42.261188 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-mfsjr" event={"ID":"abc53eaa-a216-4ea8-a223-4e2c79562edb","Type":"ContainerStarted","Data":"394c7783a6b9630abf71425905849a9dbee70686351ad68bddfb17dbf50f4953"} Dec 05 11:06:42 crc kubenswrapper[5014]: I1205 11:06:42.262489 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:42 crc kubenswrapper[5014]: I1205 11:06:42.262524 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:06:42 crc kubenswrapper[5014]: I1205 
11:06:42.292944 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-mfsjr" podStartSLOduration=28.087558659 podStartE2EDuration="33.292913269s" podCreationTimestamp="2025-12-05 11:06:09 +0000 UTC" firstStartedPulling="2025-12-05 11:06:34.730834066 +0000 UTC m=+1121.678951770" lastFinishedPulling="2025-12-05 11:06:39.936188666 +0000 UTC m=+1126.884306380" observedRunningTime="2025-12-05 11:06:42.290656373 +0000 UTC m=+1129.238774077" watchObservedRunningTime="2025-12-05 11:06:42.292913269 +0000 UTC m=+1129.241030973" Dec 05 11:06:44 crc kubenswrapper[5014]: I1205 11:06:44.825169 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:44 crc kubenswrapper[5014]: I1205 11:06:44.826778 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:45 crc kubenswrapper[5014]: I1205 11:06:45.002591 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 11:06:45 crc kubenswrapper[5014]: I1205 11:06:45.290239 5014 generic.go:334] "Generic (PLEG): container finished" podID="c773db78-f079-4d0b-8909-124b9e1d15bf" containerID="1e4a9c31244ad7194ac92363b3b58982b9272eb836d83fa2423bcd2b34f73201" exitCode=0 Dec 05 11:06:45 crc kubenswrapper[5014]: I1205 11:06:45.290336 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" event={"ID":"c773db78-f079-4d0b-8909-124b9e1d15bf","Type":"ContainerDied","Data":"1e4a9c31244ad7194ac92363b3b58982b9272eb836d83fa2423bcd2b34f73201"} Dec 05 11:06:45 crc kubenswrapper[5014]: I1205 11:06:45.294169 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b28650ad-9ebf-471c-91c9-3adef7f85d9f","Type":"ContainerStarted","Data":"1180e80d61265e1bef625ef5f3dd59276e8b828e7b0df9806547bdf46b9fc8e1"} Dec 05 11:06:45 crc kubenswrapper[5014]: I1205 11:06:45.295544 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"af56d79f-8f8a-4710-96a9-7995c0a30467","Type":"ContainerStarted","Data":"a8b033220319605d4af7553e2cc2f9f0587b0c8d8a27ce3c04b44913ca03ac2f"} Dec 05 11:06:45 crc kubenswrapper[5014]: I1205 11:06:45.350093 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=24.021882216 podStartE2EDuration="32.350075423s" podCreationTimestamp="2025-12-05 11:06:13 +0000 UTC" firstStartedPulling="2025-12-05 11:06:35.762812298 +0000 UTC m=+1122.710930002" lastFinishedPulling="2025-12-05 11:06:44.091005505 +0000 UTC m=+1131.039123209" observedRunningTime="2025-12-05 11:06:45.341379582 +0000 UTC m=+1132.289497316" watchObservedRunningTime="2025-12-05 11:06:45.350075423 +0000 UTC m=+1132.298193127" Dec 05 11:06:45 crc kubenswrapper[5014]: I1205 11:06:45.379857 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=24.379106424 podStartE2EDuration="34.379836024s" podCreationTimestamp="2025-12-05 11:06:11 +0000 UTC" firstStartedPulling="2025-12-05 11:06:34.089482046 +0000 UTC m=+1121.037599750" lastFinishedPulling="2025-12-05 11:06:44.090211646 +0000 UTC m=+1131.038329350" observedRunningTime="2025-12-05 11:06:45.367556717 +0000 UTC m=+1132.315674431" watchObservedRunningTime="2025-12-05 11:06:45.379836024 +0000 UTC m=+1132.327953738" Dec 05 11:06:45 crc kubenswrapper[5014]: I1205 
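[editor's note] The pod_startup_latency_tracker records above encode a relation that can be checked from their own fields: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be the E2E duration minus the image-pull window (lastFinishedPulling minus firstStartedPulling). The short Go check below reproduces the logged value for ovn-controller-ovs-mfsjr from its monotonic m=+ offsets; the interpretation of the SLO field is inferred from the numbers, not from kubelet documentation.

package main

import "fmt"

func main() {
	// Monotonic offsets (the m=+... values) and E2E duration taken from the
	// ovn-controller-ovs-mfsjr startup record above, in seconds.
	firstStartedPulling := 1121.678951770
	lastFinishedPulling := 1126.884306380
	e2e := 33.292913269 // podStartE2EDuration

	// SLO duration excludes time spent pulling images:
	slo := e2e - (lastFinishedPulling - firstStartedPulling)
	fmt.Printf("podStartSLOduration = %.9f s\n", slo) // 28.087558659, matching the log
}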
11:06:45.442304 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:45 crc kubenswrapper[5014]: I1205 11:06:45.483749 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.304199 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"909c5067-f4b6-4303-98e0-7f0763da52f9","Type":"ContainerStarted","Data":"1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d"} Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.306373 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9e9300c9-3a44-43c1-bbe7-d0959a35eee1","Type":"ContainerStarted","Data":"f439f7814c3c9edd6130fc7a0818b8bbbf080786b75946951491dae70a49d44d"} Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.308475 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" event={"ID":"c773db78-f079-4d0b-8909-124b9e1d15bf","Type":"ContainerStarted","Data":"d904b5d2e4ba9b129fcebb3aa7955731cae1e3cdc198ce075c8d2cd105397811"} Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.308867 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.366160 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.374688 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" podStartSLOduration=3.722000708 podStartE2EDuration="46.374664666s" podCreationTimestamp="2025-12-05 11:06:00 +0000 UTC" firstStartedPulling="2025-12-05 11:06:01.440654194 +0000 UTC m=+1088.388771898" lastFinishedPulling="2025-12-05 11:06:44.093318152 +0000 UTC m=+1131.041435856" observedRunningTime="2025-12-05 11:06:46.370030594 +0000 UTC m=+1133.318148308" watchObservedRunningTime="2025-12-05 11:06:46.374664666 +0000 UTC m=+1133.322782370" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.746583 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-6dh7d"] Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.767864 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-ff9qh"] Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.768941 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.770716 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.791069 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-vmzsx"] Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.792727 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.795260 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.805346 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ff9qh"] Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.819313 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-vmzsx"] Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.820408 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g65f4\" (UniqueName: \"kubernetes.io/projected/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-kube-api-access-g65f4\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.820445 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-combined-ca-bundle\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.820479 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-ovs-rundir\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.820504 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-ovn-rundir\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.820540 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.820563 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-config\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922371 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g65f4\" (UniqueName: \"kubernetes.io/projected/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-kube-api-access-g65f4\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922429 5014 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjnhn\" (UniqueName: \"kubernetes.io/projected/63314209-eb5f-415a-b740-f69d33f74ad3-kube-api-access-mjnhn\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922471 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922499 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-combined-ca-bundle\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922532 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922556 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-ovs-rundir\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922579 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-ovn-rundir\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922599 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-config\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922635 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.922666 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-config\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.923192 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-ovs-rundir\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.923192 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-ovn-rundir\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.923369 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-config\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.929535 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-combined-ca-bundle\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.942023 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:46 crc kubenswrapper[5014]: I1205 11:06:46.942368 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g65f4\" (UniqueName: \"kubernetes.io/projected/4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad-kube-api-access-g65f4\") pod \"ovn-controller-metrics-ff9qh\" (UID: \"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad\") " pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.023936 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-config\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.024093 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjnhn\" (UniqueName: \"kubernetes.io/projected/63314209-eb5f-415a-b740-f69d33f74ad3-kube-api-access-mjnhn\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.024141 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.024183 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.025013 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-config\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.025128 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.025155 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.056811 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjnhn\" (UniqueName: \"kubernetes.io/projected/63314209-eb5f-415a-b740-f69d33f74ad3-kube-api-access-mjnhn\") pod \"dnsmasq-dns-6bc7876d45-vmzsx\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.090962 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-ff9qh" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.115390 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.157558 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c68wl"] Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.161743 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.228514 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8cc7fc4dc-zzgtc"] Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.230164 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.238737 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8cc7fc4dc-zzgtc"] Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.308585 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-vmzsx"] Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.328997 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-config\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.329315 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-dns-svc\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.329337 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-ovsdbserver-sb\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.329356 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shldj\" (UniqueName: \"kubernetes.io/projected/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-kube-api-access-shldj\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.376448 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6jd9j"] Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.382476 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.392524 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.394728 5014 generic.go:334] "Generic (PLEG): container finished" podID="d669315d-fcb3-4d56-a85a-532a432c957d" containerID="f4240b9c544ec39a1aa816207ba9c4addaf89bfe3effe301ebb5599d55b2c7d3" exitCode=0 Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.396797 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-c68wl" event={"ID":"d669315d-fcb3-4d56-a85a-532a432c957d","Type":"ContainerDied","Data":"f4240b9c544ec39a1aa816207ba9c4addaf89bfe3effe301ebb5599d55b2c7d3"} Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.397230 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" podUID="c773db78-f079-4d0b-8909-124b9e1d15bf" containerName="dnsmasq-dns" containerID="cri-o://d904b5d2e4ba9b129fcebb3aa7955731cae1e3cdc198ce075c8d2cd105397811" gracePeriod=10 Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.397606 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.409863 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6jd9j"] Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.433243 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-config\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.433339 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.433802 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpjrf\" (UniqueName: \"kubernetes.io/projected/3f1bd062-017b-44e7-bad9-09ad9138ebcf-kube-api-access-bpjrf\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.434657 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-dns-svc\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.434704 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-ovsdbserver-sb\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.434725 5014 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-shldj\" (UniqueName: \"kubernetes.io/projected/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-kube-api-access-shldj\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.434777 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-config\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.434962 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.435043 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-config\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.435605 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-dns-svc\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.435752 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.436390 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-ovsdbserver-sb\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.479050 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shldj\" (UniqueName: \"kubernetes.io/projected/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-kube-api-access-shldj\") pod \"dnsmasq-dns-8cc7fc4dc-zzgtc\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.537611 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpjrf\" (UniqueName: \"kubernetes.io/projected/3f1bd062-017b-44e7-bad9-09ad9138ebcf-kube-api-access-bpjrf\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.538489 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.538601 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-config\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.538673 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.538841 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.541478 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.542476 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.542546 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-config\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.555239 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.557174 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.562849 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpjrf\" (UniqueName: \"kubernetes.io/projected/3f1bd062-017b-44e7-bad9-09ad9138ebcf-kube-api-access-bpjrf\") pod \"dnsmasq-dns-b8fbc5445-6jd9j\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.721233 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ff9qh"] Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.738420 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.814537 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:47 crc kubenswrapper[5014]: I1205 11:06:47.893102 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.058768 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-vmzsx"] Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.149717 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.244560 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 11:06:48 crc kubenswrapper[5014]: E1205 11:06:48.245200 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d669315d-fcb3-4d56-a85a-532a432c957d" containerName="init" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.245212 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d669315d-fcb3-4d56-a85a-532a432c957d" containerName="init" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.245386 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d669315d-fcb3-4d56-a85a-532a432c957d" containerName="init" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.257032 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.264597 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-config\") pod \"d669315d-fcb3-4d56-a85a-532a432c957d\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.264712 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-dns-svc\") pod \"d669315d-fcb3-4d56-a85a-532a432c957d\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.264745 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcqvn\" (UniqueName: \"kubernetes.io/projected/d669315d-fcb3-4d56-a85a-532a432c957d-kube-api-access-gcqvn\") pod \"d669315d-fcb3-4d56-a85a-532a432c957d\" (UID: \"d669315d-fcb3-4d56-a85a-532a432c957d\") " Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.280789 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.281010 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-wxrj7" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.281167 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.281330 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.281910 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d669315d-fcb3-4d56-a85a-532a432c957d-kube-api-access-gcqvn" (OuterVolumeSpecName: "kube-api-access-gcqvn") pod "d669315d-fcb3-4d56-a85a-532a432c957d" (UID: "d669315d-fcb3-4d56-a85a-532a432c957d"). InnerVolumeSpecName "kube-api-access-gcqvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.292873 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.300933 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-config" (OuterVolumeSpecName: "config") pod "d669315d-fcb3-4d56-a85a-532a432c957d" (UID: "d669315d-fcb3-4d56-a85a-532a432c957d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.305731 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d669315d-fcb3-4d56-a85a-532a432c957d" (UID: "d669315d-fcb3-4d56-a85a-532a432c957d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.367529 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chv8f\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-kube-api-access-chv8f\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.367675 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.367726 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.367847 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-lock\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.367893 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-cache\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.367993 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.368008 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcqvn\" (UniqueName: \"kubernetes.io/projected/d669315d-fcb3-4d56-a85a-532a432c957d-kube-api-access-gcqvn\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.368021 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d669315d-fcb3-4d56-a85a-532a432c957d-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.416749 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" event={"ID":"63314209-eb5f-415a-b740-f69d33f74ad3","Type":"ContainerStarted","Data":"2da67ef5750df2e15ab431cb13a83044ae517999ab4a0473cdabccc695b12072"} Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.419084 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8cc7fc4dc-zzgtc"] Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.423355 5014 generic.go:334] "Generic (PLEG): container finished" podID="c773db78-f079-4d0b-8909-124b9e1d15bf" containerID="d904b5d2e4ba9b129fcebb3aa7955731cae1e3cdc198ce075c8d2cd105397811" exitCode=0 Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.423463 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" event={"ID":"c773db78-f079-4d0b-8909-124b9e1d15bf","Type":"ContainerDied","Data":"d904b5d2e4ba9b129fcebb3aa7955731cae1e3cdc198ce075c8d2cd105397811"} Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.426824 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ff9qh" event={"ID":"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad","Type":"ContainerStarted","Data":"0421e8a85e3ad9dd92f9fa26a3a8af2f2d5d06cc0e13590ef45a29ca82889583"} Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.430494 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-c68wl" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.433831 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-c68wl" event={"ID":"d669315d-fcb3-4d56-a85a-532a432c957d","Type":"ContainerDied","Data":"b9fa4cf5aa9bc420bcee16cab232d750fa4066cfa62bc04afc7c63ef8b0d51e9"} Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.433879 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.433899 5014 scope.go:117] "RemoveContainer" containerID="f4240b9c544ec39a1aa816207ba9c4addaf89bfe3effe301ebb5599d55b2c7d3" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.469975 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-lock\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.470031 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-cache\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.470075 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chv8f\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-kube-api-access-chv8f\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.470186 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.470229 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.470583 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-lock\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.470610 5014 operation_generator.go:580] 
"MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.471056 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-cache\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: E1205 11:06:48.471144 5014 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:06:48 crc kubenswrapper[5014]: E1205 11:06:48.471160 5014 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:06:48 crc kubenswrapper[5014]: E1205 11:06:48.471194 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift podName:944ccaf8-60a1-4574-8dec-60c5c7ea3dcf nodeName:}" failed. No retries permitted until 2025-12-05 11:06:48.971179138 +0000 UTC m=+1135.919296842 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift") pod "swift-storage-0" (UID: "944ccaf8-60a1-4574-8dec-60c5c7ea3dcf") : configmap "swift-ring-files" not found Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.490417 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chv8f\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-kube-api-access-chv8f\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.512794 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.533058 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.580102 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6jd9j"] Dec 05 11:06:48 crc kubenswrapper[5014]: W1205 11:06:48.584916 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f1bd062_017b_44e7_bad9_09ad9138ebcf.slice/crio-77be6e9dfdb899a1814c1374d9e3abcbeec64af9f9ac9931aa93651af359690e WatchSource:0}: Error finding container 77be6e9dfdb899a1814c1374d9e3abcbeec64af9f9ac9931aa93651af359690e: Status 404 returned error can't find the container with id 77be6e9dfdb899a1814c1374d9e3abcbeec64af9f9ac9931aa93651af359690e Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.699609 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c68wl"] Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.713486 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c68wl"] Dec 05 11:06:48 
crc kubenswrapper[5014]: I1205 11:06:48.726902 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.742302 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.746761 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.748117 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.750258 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-w57s4" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.759721 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.797038 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.888223 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/55bc4dc6-b48b-4963-9004-7614f65bac44-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.888593 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqpnz\" (UniqueName: \"kubernetes.io/projected/55bc4dc6-b48b-4963-9004-7614f65bac44-kube-api-access-kqpnz\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.888640 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55bc4dc6-b48b-4963-9004-7614f65bac44-scripts\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.888656 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/55bc4dc6-b48b-4963-9004-7614f65bac44-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.888828 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55bc4dc6-b48b-4963-9004-7614f65bac44-config\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.889469 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/55bc4dc6-b48b-4963-9004-7614f65bac44-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.889587 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55bc4dc6-b48b-4963-9004-7614f65bac44-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: E1205 11:06:48.959993 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd669315d_fcb3_4d56_a85a_532a432c957d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63314209_eb5f_415a_b740_f69d33f74ad3.slice/crio-fa4f524b1d3fd19ec175b6aa0e4cfa7dbc9de1e005e751bcd167b8f19f0ae499.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f19c5de_0ba9_4a20_b6e2_660f6abce0eb.slice/crio-conmon-8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.991463 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/55bc4dc6-b48b-4963-9004-7614f65bac44-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.991697 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55bc4dc6-b48b-4963-9004-7614f65bac44-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.991734 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/55bc4dc6-b48b-4963-9004-7614f65bac44-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.991757 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqpnz\" (UniqueName: \"kubernetes.io/projected/55bc4dc6-b48b-4963-9004-7614f65bac44-kube-api-access-kqpnz\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.991808 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55bc4dc6-b48b-4963-9004-7614f65bac44-scripts\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.991845 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/55bc4dc6-b48b-4963-9004-7614f65bac44-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.991900 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55bc4dc6-b48b-4963-9004-7614f65bac44-config\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 
05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.991941 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:48 crc kubenswrapper[5014]: E1205 11:06:48.992149 5014 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:06:48 crc kubenswrapper[5014]: E1205 11:06:48.992169 5014 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:06:48 crc kubenswrapper[5014]: E1205 11:06:48.992229 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift podName:944ccaf8-60a1-4574-8dec-60c5c7ea3dcf nodeName:}" failed. No retries permitted until 2025-12-05 11:06:49.992207171 +0000 UTC m=+1136.940324875 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift") pod "swift-storage-0" (UID: "944ccaf8-60a1-4574-8dec-60c5c7ea3dcf") : configmap "swift-ring-files" not found Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.996287 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/55bc4dc6-b48b-4963-9004-7614f65bac44-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.996473 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55bc4dc6-b48b-4963-9004-7614f65bac44-scripts\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:48 crc kubenswrapper[5014]: I1205 11:06:48.996861 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55bc4dc6-b48b-4963-9004-7614f65bac44-config\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.000232 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/55bc4dc6-b48b-4963-9004-7614f65bac44-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.001536 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/55bc4dc6-b48b-4963-9004-7614f65bac44-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.005835 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55bc4dc6-b48b-4963-9004-7614f65bac44-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.016959 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kqpnz\" (UniqueName: \"kubernetes.io/projected/55bc4dc6-b48b-4963-9004-7614f65bac44-kube-api-access-kqpnz\") pod \"ovn-northd-0\" (UID: \"55bc4dc6-b48b-4963-9004-7614f65bac44\") " pod="openstack/ovn-northd-0" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.046991 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.093134 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.109564 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.222623 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.310027 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-dns-svc\") pod \"c773db78-f079-4d0b-8909-124b9e1d15bf\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.310081 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rssxf\" (UniqueName: \"kubernetes.io/projected/c773db78-f079-4d0b-8909-124b9e1d15bf-kube-api-access-rssxf\") pod \"c773db78-f079-4d0b-8909-124b9e1d15bf\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.310146 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-config\") pod \"c773db78-f079-4d0b-8909-124b9e1d15bf\" (UID: \"c773db78-f079-4d0b-8909-124b9e1d15bf\") " Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.327610 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c773db78-f079-4d0b-8909-124b9e1d15bf-kube-api-access-rssxf" (OuterVolumeSpecName: "kube-api-access-rssxf") pod "c773db78-f079-4d0b-8909-124b9e1d15bf" (UID: "c773db78-f079-4d0b-8909-124b9e1d15bf"). InnerVolumeSpecName "kube-api-access-rssxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.339313 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d669315d-fcb3-4d56-a85a-532a432c957d" path="/var/lib/kubelet/pods/d669315d-fcb3-4d56-a85a-532a432c957d/volumes" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.393023 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c773db78-f079-4d0b-8909-124b9e1d15bf" (UID: "c773db78-f079-4d0b-8909-124b9e1d15bf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.401799 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-config" (OuterVolumeSpecName: "config") pod "c773db78-f079-4d0b-8909-124b9e1d15bf" (UID: "c773db78-f079-4d0b-8909-124b9e1d15bf"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.412993 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.413035 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rssxf\" (UniqueName: \"kubernetes.io/projected/c773db78-f079-4d0b-8909-124b9e1d15bf-kube-api-access-rssxf\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.413049 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c773db78-f079-4d0b-8909-124b9e1d15bf-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.498653 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa","Type":"ContainerStarted","Data":"675b639c51feba9ce82a8f31828fcecdce281b4ca37554d0a7cd5ae0c9e41d99"} Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.505116 5014 generic.go:334] "Generic (PLEG): container finished" podID="1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" containerID="8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141" exitCode=0 Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.505189 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" event={"ID":"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb","Type":"ContainerDied","Data":"8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141"} Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.505219 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" event={"ID":"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb","Type":"ContainerStarted","Data":"93803d8236323b27bca9cb2c9ebfd16797d58da8f4e8a4cd79c02eba1f011f8d"} Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.512405 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.512641 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-6dh7d" event={"ID":"c773db78-f079-4d0b-8909-124b9e1d15bf","Type":"ContainerDied","Data":"ad85a607cd9b9cbf2efd5a3ea0a6f3edac032081adaf8ede37aadf9cced7f9b9"} Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.512725 5014 scope.go:117] "RemoveContainer" containerID="d904b5d2e4ba9b129fcebb3aa7955731cae1e3cdc198ce075c8d2cd105397811" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.541158 5014 generic.go:334] "Generic (PLEG): container finished" podID="3f1bd062-017b-44e7-bad9-09ad9138ebcf" containerID="3444ec4072cf748b91556c9b0c2b06774c10e5ac2205bfed338e1eaa95663ced" exitCode=0 Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.541737 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" event={"ID":"3f1bd062-017b-44e7-bad9-09ad9138ebcf","Type":"ContainerDied","Data":"3444ec4072cf748b91556c9b0c2b06774c10e5ac2205bfed338e1eaa95663ced"} Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.541775 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" event={"ID":"3f1bd062-017b-44e7-bad9-09ad9138ebcf","Type":"ContainerStarted","Data":"77be6e9dfdb899a1814c1374d9e3abcbeec64af9f9ac9931aa93651af359690e"} Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.581616 5014 scope.go:117] "RemoveContainer" containerID="1e4a9c31244ad7194ac92363b3b58982b9272eb836d83fa2423bcd2b34f73201" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.589719 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ff9qh" event={"ID":"4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad","Type":"ContainerStarted","Data":"48cb446db2fe7bb5ad065f6c91cd6b36254998b39d3cf83eaee42dfa45199d48"} Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.615709 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-ff9qh" podStartSLOduration=3.615681688 podStartE2EDuration="3.615681688s" podCreationTimestamp="2025-12-05 11:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:06:49.609438616 +0000 UTC m=+1136.557556360" watchObservedRunningTime="2025-12-05 11:06:49.615681688 +0000 UTC m=+1136.563799402" Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.654255 5014 generic.go:334] "Generic (PLEG): container finished" podID="63314209-eb5f-415a-b740-f69d33f74ad3" containerID="fa4f524b1d3fd19ec175b6aa0e4cfa7dbc9de1e005e751bcd167b8f19f0ae499" exitCode=0 Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.655402 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" event={"ID":"63314209-eb5f-415a-b740-f69d33f74ad3","Type":"ContainerDied","Data":"fa4f524b1d3fd19ec175b6aa0e4cfa7dbc9de1e005e751bcd167b8f19f0ae499"} Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.739882 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-6dh7d"] Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.756599 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-6dh7d"] Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.936559 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 
05 11:06:49 crc kubenswrapper[5014]: W1205 11:06:49.939435 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55bc4dc6_b48b_4963_9004_7614f65bac44.slice/crio-d40764149224793b470cef04445ce9b3ed864ac37c9eb0895d814c6d59d59918 WatchSource:0}: Error finding container d40764149224793b470cef04445ce9b3ed864ac37c9eb0895d814c6d59d59918: Status 404 returned error can't find the container with id d40764149224793b470cef04445ce9b3ed864ac37c9eb0895d814c6d59d59918 Dec 05 11:06:49 crc kubenswrapper[5014]: I1205 11:06:49.999244 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.030833 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:50 crc kubenswrapper[5014]: E1205 11:06:50.031014 5014 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:06:50 crc kubenswrapper[5014]: E1205 11:06:50.031036 5014 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:06:50 crc kubenswrapper[5014]: E1205 11:06:50.031089 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift podName:944ccaf8-60a1-4574-8dec-60c5c7ea3dcf nodeName:}" failed. No retries permitted until 2025-12-05 11:06:52.031070909 +0000 UTC m=+1138.979188613 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift") pod "swift-storage-0" (UID: "944ccaf8-60a1-4574-8dec-60c5c7ea3dcf") : configmap "swift-ring-files" not found Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.131989 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjnhn\" (UniqueName: \"kubernetes.io/projected/63314209-eb5f-415a-b740-f69d33f74ad3-kube-api-access-mjnhn\") pod \"63314209-eb5f-415a-b740-f69d33f74ad3\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.132047 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-dns-svc\") pod \"63314209-eb5f-415a-b740-f69d33f74ad3\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.132122 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-config\") pod \"63314209-eb5f-415a-b740-f69d33f74ad3\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.132139 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-ovsdbserver-sb\") pod \"63314209-eb5f-415a-b740-f69d33f74ad3\" (UID: \"63314209-eb5f-415a-b740-f69d33f74ad3\") " Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.140220 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63314209-eb5f-415a-b740-f69d33f74ad3-kube-api-access-mjnhn" (OuterVolumeSpecName: "kube-api-access-mjnhn") pod "63314209-eb5f-415a-b740-f69d33f74ad3" (UID: "63314209-eb5f-415a-b740-f69d33f74ad3"). InnerVolumeSpecName "kube-api-access-mjnhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.154451 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-config" (OuterVolumeSpecName: "config") pod "63314209-eb5f-415a-b740-f69d33f74ad3" (UID: "63314209-eb5f-415a-b740-f69d33f74ad3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.166662 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "63314209-eb5f-415a-b740-f69d33f74ad3" (UID: "63314209-eb5f-415a-b740-f69d33f74ad3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.166794 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "63314209-eb5f-415a-b740-f69d33f74ad3" (UID: "63314209-eb5f-415a-b740-f69d33f74ad3"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.258923 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.258972 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.258987 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjnhn\" (UniqueName: \"kubernetes.io/projected/63314209-eb5f-415a-b740-f69d33f74ad3-kube-api-access-mjnhn\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.259002 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63314209-eb5f-415a-b740-f69d33f74ad3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.662885 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"55bc4dc6-b48b-4963-9004-7614f65bac44","Type":"ContainerStarted","Data":"d40764149224793b470cef04445ce9b3ed864ac37c9eb0895d814c6d59d59918"} Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.666061 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" event={"ID":"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb","Type":"ContainerStarted","Data":"7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0"} Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.666188 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.676528 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" event={"ID":"3f1bd062-017b-44e7-bad9-09ad9138ebcf","Type":"ContainerStarted","Data":"af564453c3a6f10be6edad2ef9073512a99e8d4116011d12211dd42b5e5951ff"} Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.676641 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.685161 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" event={"ID":"63314209-eb5f-415a-b740-f69d33f74ad3","Type":"ContainerDied","Data":"2da67ef5750df2e15ab431cb13a83044ae517999ab4a0473cdabccc695b12072"} Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.685231 5014 scope.go:117] "RemoveContainer" containerID="fa4f524b1d3fd19ec175b6aa0e4cfa7dbc9de1e005e751bcd167b8f19f0ae499" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.685414 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-vmzsx" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.755547 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" podStartSLOduration=3.755519715 podStartE2EDuration="3.755519715s" podCreationTimestamp="2025-12-05 11:06:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:06:50.732550258 +0000 UTC m=+1137.680667972" watchObservedRunningTime="2025-12-05 11:06:50.755519715 +0000 UTC m=+1137.703637419" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.770155 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" podStartSLOduration=3.770139529 podStartE2EDuration="3.770139529s" podCreationTimestamp="2025-12-05 11:06:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:06:50.769462113 +0000 UTC m=+1137.717579817" watchObservedRunningTime="2025-12-05 11:06:50.770139529 +0000 UTC m=+1137.718257233" Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.836906 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-vmzsx"] Dec 05 11:06:50 crc kubenswrapper[5014]: I1205 11:06:50.845417 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-vmzsx"] Dec 05 11:06:51 crc kubenswrapper[5014]: I1205 11:06:51.332745 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63314209-eb5f-415a-b740-f69d33f74ad3" path="/var/lib/kubelet/pods/63314209-eb5f-415a-b740-f69d33f74ad3/volumes" Dec 05 11:06:51 crc kubenswrapper[5014]: I1205 11:06:51.333597 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c773db78-f079-4d0b-8909-124b9e1d15bf" path="/var/lib/kubelet/pods/c773db78-f079-4d0b-8909-124b9e1d15bf/volumes" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.091223 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:52 crc kubenswrapper[5014]: E1205 11:06:52.091411 5014 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:06:52 crc kubenswrapper[5014]: E1205 11:06:52.091621 5014 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:06:52 crc kubenswrapper[5014]: E1205 11:06:52.091673 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift podName:944ccaf8-60a1-4574-8dec-60c5c7ea3dcf nodeName:}" failed. No retries permitted until 2025-12-05 11:06:56.091657401 +0000 UTC m=+1143.039775105 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift") pod "swift-storage-0" (UID: "944ccaf8-60a1-4574-8dec-60c5c7ea3dcf") : configmap "swift-ring-files" not found Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.201509 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-nxnqm"] Dec 05 11:06:52 crc kubenswrapper[5014]: E1205 11:06:52.201923 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c773db78-f079-4d0b-8909-124b9e1d15bf" containerName="dnsmasq-dns" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.201942 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c773db78-f079-4d0b-8909-124b9e1d15bf" containerName="dnsmasq-dns" Dec 05 11:06:52 crc kubenswrapper[5014]: E1205 11:06:52.201958 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c773db78-f079-4d0b-8909-124b9e1d15bf" containerName="init" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.201965 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c773db78-f079-4d0b-8909-124b9e1d15bf" containerName="init" Dec 05 11:06:52 crc kubenswrapper[5014]: E1205 11:06:52.201984 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63314209-eb5f-415a-b740-f69d33f74ad3" containerName="init" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.201994 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="63314209-eb5f-415a-b740-f69d33f74ad3" containerName="init" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.202145 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="63314209-eb5f-415a-b740-f69d33f74ad3" containerName="init" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.202166 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="c773db78-f079-4d0b-8909-124b9e1d15bf" containerName="dnsmasq-dns" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.202744 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.212708 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.213173 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.213574 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.214310 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-nxnqm"] Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.295217 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrcnb\" (UniqueName: \"kubernetes.io/projected/3d39e279-9315-4b5e-af14-ea88aef45b00-kube-api-access-mrcnb\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.295305 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-dispersionconf\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.295345 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-swiftconf\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.295376 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-scripts\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.295479 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3d39e279-9315-4b5e-af14-ea88aef45b00-etc-swift\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.295636 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-ring-data-devices\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.295720 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-combined-ca-bundle\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 
11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.397303 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-swiftconf\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.397376 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-scripts\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.397409 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3d39e279-9315-4b5e-af14-ea88aef45b00-etc-swift\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.397473 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-ring-data-devices\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.397506 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-combined-ca-bundle\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.397577 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrcnb\" (UniqueName: \"kubernetes.io/projected/3d39e279-9315-4b5e-af14-ea88aef45b00-kube-api-access-mrcnb\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.397620 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-dispersionconf\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.422895 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-scripts\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.423305 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3d39e279-9315-4b5e-af14-ea88aef45b00-etc-swift\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.423527 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-ring-data-devices\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.428049 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-dispersionconf\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.429329 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrcnb\" (UniqueName: \"kubernetes.io/projected/3d39e279-9315-4b5e-af14-ea88aef45b00-kube-api-access-mrcnb\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.429420 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-combined-ca-bundle\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.430087 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-swiftconf\") pod \"swift-ring-rebalance-nxnqm\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.527052 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:06:52 crc kubenswrapper[5014]: I1205 11:06:52.974097 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-nxnqm"] Dec 05 11:06:52 crc kubenswrapper[5014]: W1205 11:06:52.979451 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d39e279_9315_4b5e_af14_ea88aef45b00.slice/crio-b3142e2225eca7e7d5273b21716af271efb4b10edba8698eb1e9a14222284eeb WatchSource:0}: Error finding container b3142e2225eca7e7d5273b21716af271efb4b10edba8698eb1e9a14222284eeb: Status 404 returned error can't find the container with id b3142e2225eca7e7d5273b21716af271efb4b10edba8698eb1e9a14222284eeb Dec 05 11:06:53 crc kubenswrapper[5014]: I1205 11:06:53.720575 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nxnqm" event={"ID":"3d39e279-9315-4b5e-af14-ea88aef45b00","Type":"ContainerStarted","Data":"b3142e2225eca7e7d5273b21716af271efb4b10edba8698eb1e9a14222284eeb"} Dec 05 11:06:55 crc kubenswrapper[5014]: I1205 11:06:55.740776 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"55bc4dc6-b48b-4963-9004-7614f65bac44","Type":"ContainerStarted","Data":"ad3a79da0c4855e0eec73347dfdc9773663a5acddb21e1c8ffb4807be61ce16f"} Dec 05 11:06:56 crc kubenswrapper[5014]: I1205 11:06:56.167457 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:06:56 crc kubenswrapper[5014]: E1205 11:06:56.167731 5014 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:06:56 crc kubenswrapper[5014]: E1205 11:06:56.167769 5014 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:06:56 crc kubenswrapper[5014]: E1205 11:06:56.167851 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift podName:944ccaf8-60a1-4574-8dec-60c5c7ea3dcf nodeName:}" failed. No retries permitted until 2025-12-05 11:07:04.167826174 +0000 UTC m=+1151.115943878 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift") pod "swift-storage-0" (UID: "944ccaf8-60a1-4574-8dec-60c5c7ea3dcf") : configmap "swift-ring-files" not found Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.560554 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.758374 5014 generic.go:334] "Generic (PLEG): container finished" podID="8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa" containerID="675b639c51feba9ce82a8f31828fcecdce281b4ca37554d0a7cd5ae0c9e41d99" exitCode=0 Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.758531 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa","Type":"ContainerDied","Data":"675b639c51feba9ce82a8f31828fcecdce281b4ca37554d0a7cd5ae0c9e41d99"} Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.762407 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nxnqm" event={"ID":"3d39e279-9315-4b5e-af14-ea88aef45b00","Type":"ContainerStarted","Data":"aacceedcaa601a913f66d68aaf063dcd224e11485f62bf47ea9235d4db9b318d"} Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.771510 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"55bc4dc6-b48b-4963-9004-7614f65bac44","Type":"ContainerStarted","Data":"57fdaf27d6a5ce8e1af810659434eb62815de176b8b89ad710aa596df016ac33"} Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.771796 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.820458 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.840929 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=4.821425447 podStartE2EDuration="9.840912081s" podCreationTimestamp="2025-12-05 11:06:48 +0000 UTC" firstStartedPulling="2025-12-05 11:06:49.942317448 +0000 UTC m=+1136.890435162" lastFinishedPulling="2025-12-05 11:06:54.961804092 +0000 UTC m=+1141.909921796" observedRunningTime="2025-12-05 11:06:57.82766098 +0000 UTC m=+1144.775778694" watchObservedRunningTime="2025-12-05 11:06:57.840912081 +0000 UTC m=+1144.789029785" Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.876856 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-nxnqm" podStartSLOduration=1.605751423 podStartE2EDuration="5.876823342s" podCreationTimestamp="2025-12-05 11:06:52 +0000 UTC" firstStartedPulling="2025-12-05 11:06:52.981681921 +0000 UTC m=+1139.929799625" lastFinishedPulling="2025-12-05 11:06:57.25275384 +0000 UTC m=+1144.200871544" observedRunningTime="2025-12-05 11:06:57.861107391 +0000 UTC m=+1144.809225105" watchObservedRunningTime="2025-12-05 11:06:57.876823342 +0000 UTC m=+1144.824941046" Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.962888 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8cc7fc4dc-zzgtc"] Dec 05 11:06:57 crc kubenswrapper[5014]: I1205 11:06:57.963167 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" 
podUID="1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" containerName="dnsmasq-dns" containerID="cri-o://7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0" gracePeriod=10 Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.704590 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.812782 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa","Type":"ContainerStarted","Data":"26766b52cf811a5bff382e4c6be2580a97c8d784255654ef05376b863a601ced"} Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.828374 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-dns-svc\") pod \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.829063 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-ovsdbserver-sb\") pod \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.829152 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shldj\" (UniqueName: \"kubernetes.io/projected/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-kube-api-access-shldj\") pod \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.829194 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-config\") pod \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\" (UID: \"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb\") " Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.835676 5014 generic.go:334] "Generic (PLEG): container finished" podID="1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" containerID="7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0" exitCode=0 Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.836609 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.836762 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" event={"ID":"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb","Type":"ContainerDied","Data":"7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0"} Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.836786 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8cc7fc4dc-zzgtc" event={"ID":"1f19c5de-0ba9-4a20-b6e2-660f6abce0eb","Type":"ContainerDied","Data":"93803d8236323b27bca9cb2c9ebfd16797d58da8f4e8a4cd79c02eba1f011f8d"} Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.836806 5014 scope.go:117] "RemoveContainer" containerID="7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.848567 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-kube-api-access-shldj" (OuterVolumeSpecName: "kube-api-access-shldj") pod "1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" (UID: "1f19c5de-0ba9-4a20-b6e2-660f6abce0eb"). InnerVolumeSpecName "kube-api-access-shldj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.875971 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371978.978823 podStartE2EDuration="57.875952987s" podCreationTimestamp="2025-12-05 11:06:01 +0000 UTC" firstStartedPulling="2025-12-05 11:06:03.93030152 +0000 UTC m=+1090.878419224" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:06:58.869105602 +0000 UTC m=+1145.817223306" watchObservedRunningTime="2025-12-05 11:06:58.875952987 +0000 UTC m=+1145.824070691" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.894740 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" (UID: "1f19c5de-0ba9-4a20-b6e2-660f6abce0eb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.921490 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" (UID: "1f19c5de-0ba9-4a20-b6e2-660f6abce0eb"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.933787 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shldj\" (UniqueName: \"kubernetes.io/projected/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-kube-api-access-shldj\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.933834 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.933850 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.962614 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-config" (OuterVolumeSpecName: "config") pod "1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" (UID: "1f19c5de-0ba9-4a20-b6e2-660f6abce0eb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:06:58 crc kubenswrapper[5014]: I1205 11:06:58.972498 5014 scope.go:117] "RemoveContainer" containerID="8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141" Dec 05 11:06:59 crc kubenswrapper[5014]: I1205 11:06:59.001970 5014 scope.go:117] "RemoveContainer" containerID="7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0" Dec 05 11:06:59 crc kubenswrapper[5014]: E1205 11:06:59.003455 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0\": container with ID starting with 7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0 not found: ID does not exist" containerID="7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0" Dec 05 11:06:59 crc kubenswrapper[5014]: I1205 11:06:59.003497 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0"} err="failed to get container status \"7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0\": rpc error: code = NotFound desc = could not find container \"7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0\": container with ID starting with 7fcdf130f28ea8a67a2996c9e4a1a828caa783d27180ee67c955e803a1fef2f0 not found: ID does not exist" Dec 05 11:06:59 crc kubenswrapper[5014]: I1205 11:06:59.003529 5014 scope.go:117] "RemoveContainer" containerID="8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141" Dec 05 11:06:59 crc kubenswrapper[5014]: E1205 11:06:59.007702 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141\": container with ID starting with 8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141 not found: ID does not exist" containerID="8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141" Dec 05 11:06:59 crc kubenswrapper[5014]: I1205 11:06:59.007752 5014 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141"} err="failed to get container status \"8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141\": rpc error: code = NotFound desc = could not find container \"8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141\": container with ID starting with 8be1b6ed3b2a74e493ccce05ab442971e56515a90a682242879143168ef32141 not found: ID does not exist" Dec 05 11:06:59 crc kubenswrapper[5014]: I1205 11:06:59.037483 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:06:59 crc kubenswrapper[5014]: I1205 11:06:59.176878 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8cc7fc4dc-zzgtc"] Dec 05 11:06:59 crc kubenswrapper[5014]: I1205 11:06:59.181087 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8cc7fc4dc-zzgtc"] Dec 05 11:06:59 crc kubenswrapper[5014]: E1205 11:06:59.252157 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f19c5de_0ba9_4a20_b6e2_660f6abce0eb.slice/crio-93803d8236323b27bca9cb2c9ebfd16797d58da8f4e8a4cd79c02eba1f011f8d\": RecentStats: unable to find data in memory cache]" Dec 05 11:06:59 crc kubenswrapper[5014]: I1205 11:06:59.345985 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" path="/var/lib/kubelet/pods/1f19c5de-0ba9-4a20-b6e2-660f6abce0eb/volumes" Dec 05 11:07:03 crc kubenswrapper[5014]: I1205 11:07:03.360953 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 05 11:07:03 crc kubenswrapper[5014]: I1205 11:07:03.361321 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 05 11:07:04 crc kubenswrapper[5014]: I1205 11:07:04.189784 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:07:04 crc kubenswrapper[5014]: E1205 11:07:04.189954 5014 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 11:07:04 crc kubenswrapper[5014]: E1205 11:07:04.189977 5014 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 11:07:04 crc kubenswrapper[5014]: E1205 11:07:04.190043 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift podName:944ccaf8-60a1-4574-8dec-60c5c7ea3dcf nodeName:}" failed. No retries permitted until 2025-12-05 11:07:20.190022304 +0000 UTC m=+1167.138140008 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift") pod "swift-storage-0" (UID: "944ccaf8-60a1-4574-8dec-60c5c7ea3dcf") : configmap "swift-ring-files" not found Dec 05 11:07:07 crc kubenswrapper[5014]: I1205 11:07:07.775559 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 05 11:07:07 crc kubenswrapper[5014]: I1205 11:07:07.865916 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 05 11:07:08 crc kubenswrapper[5014]: I1205 11:07:08.928836 5014 generic.go:334] "Generic (PLEG): container finished" podID="3d39e279-9315-4b5e-af14-ea88aef45b00" containerID="aacceedcaa601a913f66d68aaf063dcd224e11485f62bf47ea9235d4db9b318d" exitCode=0 Dec 05 11:07:08 crc kubenswrapper[5014]: I1205 11:07:08.928927 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nxnqm" event={"ID":"3d39e279-9315-4b5e-af14-ea88aef45b00","Type":"ContainerDied","Data":"aacceedcaa601a913f66d68aaf063dcd224e11485f62bf47ea9235d4db9b318d"} Dec 05 11:07:09 crc kubenswrapper[5014]: I1205 11:07:09.162532 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.103348 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-6wk9t" podUID="2b673e96-d37f-49d8-b3f2-c72cd66ab6db" containerName="ovn-controller" probeResult="failure" output=< Dec 05 11:07:10 crc kubenswrapper[5014]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 11:07:10 crc kubenswrapper[5014]: > Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.273071 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.398153 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-dispersionconf\") pod \"3d39e279-9315-4b5e-af14-ea88aef45b00\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.398719 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrcnb\" (UniqueName: \"kubernetes.io/projected/3d39e279-9315-4b5e-af14-ea88aef45b00-kube-api-access-mrcnb\") pod \"3d39e279-9315-4b5e-af14-ea88aef45b00\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.398778 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3d39e279-9315-4b5e-af14-ea88aef45b00-etc-swift\") pod \"3d39e279-9315-4b5e-af14-ea88aef45b00\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.398814 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-swiftconf\") pod \"3d39e279-9315-4b5e-af14-ea88aef45b00\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.398841 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-scripts\") pod \"3d39e279-9315-4b5e-af14-ea88aef45b00\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.398864 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-combined-ca-bundle\") pod \"3d39e279-9315-4b5e-af14-ea88aef45b00\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.398991 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-ring-data-devices\") pod \"3d39e279-9315-4b5e-af14-ea88aef45b00\" (UID: \"3d39e279-9315-4b5e-af14-ea88aef45b00\") " Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.399940 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "3d39e279-9315-4b5e-af14-ea88aef45b00" (UID: "3d39e279-9315-4b5e-af14-ea88aef45b00"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.400029 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d39e279-9315-4b5e-af14-ea88aef45b00-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "3d39e279-9315-4b5e-af14-ea88aef45b00" (UID: "3d39e279-9315-4b5e-af14-ea88aef45b00"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.405234 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d39e279-9315-4b5e-af14-ea88aef45b00-kube-api-access-mrcnb" (OuterVolumeSpecName: "kube-api-access-mrcnb") pod "3d39e279-9315-4b5e-af14-ea88aef45b00" (UID: "3d39e279-9315-4b5e-af14-ea88aef45b00"). InnerVolumeSpecName "kube-api-access-mrcnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.409723 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "3d39e279-9315-4b5e-af14-ea88aef45b00" (UID: "3d39e279-9315-4b5e-af14-ea88aef45b00"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.421379 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-scripts" (OuterVolumeSpecName: "scripts") pod "3d39e279-9315-4b5e-af14-ea88aef45b00" (UID: "3d39e279-9315-4b5e-af14-ea88aef45b00"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.421602 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "3d39e279-9315-4b5e-af14-ea88aef45b00" (UID: "3d39e279-9315-4b5e-af14-ea88aef45b00"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.430436 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d39e279-9315-4b5e-af14-ea88aef45b00" (UID: "3d39e279-9315-4b5e-af14-ea88aef45b00"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.500874 5014 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.500904 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrcnb\" (UniqueName: \"kubernetes.io/projected/3d39e279-9315-4b5e-af14-ea88aef45b00-kube-api-access-mrcnb\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.500918 5014 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3d39e279-9315-4b5e-af14-ea88aef45b00-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.500926 5014 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.500936 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.500945 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d39e279-9315-4b5e-af14-ea88aef45b00-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.500956 5014 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3d39e279-9315-4b5e-af14-ea88aef45b00-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.949801 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nxnqm" event={"ID":"3d39e279-9315-4b5e-af14-ea88aef45b00","Type":"ContainerDied","Data":"b3142e2225eca7e7d5273b21716af271efb4b10edba8698eb1e9a14222284eeb"} Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.949861 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3142e2225eca7e7d5273b21716af271efb4b10edba8698eb1e9a14222284eeb" Dec 05 11:07:10 crc kubenswrapper[5014]: I1205 11:07:10.949878 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-nxnqm" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.687616 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-nj5xc"] Dec 05 11:07:14 crc kubenswrapper[5014]: E1205 11:07:14.689715 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" containerName="dnsmasq-dns" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.689877 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" containerName="dnsmasq-dns" Dec 05 11:07:14 crc kubenswrapper[5014]: E1205 11:07:14.689971 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" containerName="init" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.690054 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" containerName="init" Dec 05 11:07:14 crc kubenswrapper[5014]: E1205 11:07:14.690143 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d39e279-9315-4b5e-af14-ea88aef45b00" containerName="swift-ring-rebalance" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.690226 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d39e279-9315-4b5e-af14-ea88aef45b00" containerName="swift-ring-rebalance" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.690532 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f19c5de-0ba9-4a20-b6e2-660f6abce0eb" containerName="dnsmasq-dns" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.690653 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d39e279-9315-4b5e-af14-ea88aef45b00" containerName="swift-ring-rebalance" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.691535 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.697068 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-58ab-account-create-update-m6mpt"] Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.698918 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.701590 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.703589 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nj5xc"] Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.712885 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-58ab-account-create-update-m6mpt"] Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.769887 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brzq9\" (UniqueName: \"kubernetes.io/projected/bef9ec2f-df6e-4409-8e88-8ae5895faa24-kube-api-access-brzq9\") pod \"keystone-db-create-nj5xc\" (UID: \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\") " pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.770065 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbef5fef-ac9d-4c62-af8e-9956ce64a080-operator-scripts\") pod \"keystone-58ab-account-create-update-m6mpt\" (UID: \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\") " pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.770118 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lszph\" (UniqueName: \"kubernetes.io/projected/dbef5fef-ac9d-4c62-af8e-9956ce64a080-kube-api-access-lszph\") pod \"keystone-58ab-account-create-update-m6mpt\" (UID: \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\") " pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.770221 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bef9ec2f-df6e-4409-8e88-8ae5895faa24-operator-scripts\") pod \"keystone-db-create-nj5xc\" (UID: \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\") " pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.871960 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbef5fef-ac9d-4c62-af8e-9956ce64a080-operator-scripts\") pod \"keystone-58ab-account-create-update-m6mpt\" (UID: \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\") " pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.872038 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lszph\" (UniqueName: \"kubernetes.io/projected/dbef5fef-ac9d-4c62-af8e-9956ce64a080-kube-api-access-lszph\") pod \"keystone-58ab-account-create-update-m6mpt\" (UID: \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\") " pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.872134 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bef9ec2f-df6e-4409-8e88-8ae5895faa24-operator-scripts\") pod \"keystone-db-create-nj5xc\" (UID: \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\") " pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:14 crc 
kubenswrapper[5014]: I1205 11:07:14.872182 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brzq9\" (UniqueName: \"kubernetes.io/projected/bef9ec2f-df6e-4409-8e88-8ae5895faa24-kube-api-access-brzq9\") pod \"keystone-db-create-nj5xc\" (UID: \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\") " pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.872915 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbef5fef-ac9d-4c62-af8e-9956ce64a080-operator-scripts\") pod \"keystone-58ab-account-create-update-m6mpt\" (UID: \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\") " pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.873431 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bef9ec2f-df6e-4409-8e88-8ae5895faa24-operator-scripts\") pod \"keystone-db-create-nj5xc\" (UID: \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\") " pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.894680 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lszph\" (UniqueName: \"kubernetes.io/projected/dbef5fef-ac9d-4c62-af8e-9956ce64a080-kube-api-access-lszph\") pod \"keystone-58ab-account-create-update-m6mpt\" (UID: \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\") " pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.898830 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brzq9\" (UniqueName: \"kubernetes.io/projected/bef9ec2f-df6e-4409-8e88-8ae5895faa24-kube-api-access-brzq9\") pod \"keystone-db-create-nj5xc\" (UID: \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\") " pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.987591 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-lfvsl"] Dec 05 11:07:14 crc kubenswrapper[5014]: I1205 11:07:14.989063 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.010930 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lfvsl"] Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.024851 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.035698 5014 util.go:30] "No sandbox for pod can be found. 
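
[annotation] The records above trace each volume through the attach/mount pipeline in order: VerifyControllerAttachedVolume starts (reconciler_common.go:245), MountVolume starts (reconciler_common.go:218), then MountVolume.SetUp succeeds (operation_generator.go:637). A sketch that pairs started/succeeded records per (pod UID, volume) and prints the gap follows; klog headers omit the year, so the parse assumes all records fall within one day, and the scanner assumes journald's native one-record-per-line layout rather than the wrapped lines of this capture.

    // mount_latency.go - time from "MountVolume started" to "SetUp succeeded".
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
        "time"
    )

    var (
        hdr   = regexp.MustCompile(`[IWE](\d{4} \d{2}:\d{2}:\d{2}\.\d{6})`) // e.g. I1205 11:07:14.872915
        start = regexp.MustCompile(`operationExecutor\.MountVolume started for volume \\"([^\\"]+)\\".*?\(UID: \\"([^\\"]+)\\"\)`)
        done  = regexp.MustCompile(`MountVolume\.SetUp succeeded for volume \\"([^\\"]+)\\".*?\(UID: \\"([^\\"]+)\\"\)`)
    )

    func stamp(line string) (time.Time, bool) {
        m := hdr.FindStringSubmatch(line)
        if m == nil {
            return time.Time{}, false
        }
        t, err := time.Parse("0102 15:04:05.000000", m[1]) // year-less; same-day deltas only
        return t, err == nil
    }

    func main() {
        started := map[string]time.Time{}
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
        for sc.Scan() {
            line := sc.Text()
            t, ok := stamp(line)
            if !ok {
                continue
            }
            if m := start.FindStringSubmatch(line); m != nil {
                started[m[2]+"/"+m[1]] = t
            } else if m := done.FindStringSubmatch(line); m != nil {
                if t0, ok := started[m[2]+"/"+m[1]]; ok {
                    fmt.Printf("%s mounted in %v\n", m[1], t.Sub(t0))
                }
            }
        }
    }
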
Need to start a new one" pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.076632 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44m8n\" (UniqueName: \"kubernetes.io/projected/908fbcc8-261b-40a5-9f70-b041c908b47e-kube-api-access-44m8n\") pod \"placement-db-create-lfvsl\" (UID: \"908fbcc8-261b-40a5-9f70-b041c908b47e\") " pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.076691 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/908fbcc8-261b-40a5-9f70-b041c908b47e-operator-scripts\") pod \"placement-db-create-lfvsl\" (UID: \"908fbcc8-261b-40a5-9f70-b041c908b47e\") " pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.146828 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-853a-account-create-update-dk6wb"] Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.148993 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.151879 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.152493 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-6wk9t" podUID="2b673e96-d37f-49d8-b3f2-c72cd66ab6db" containerName="ovn-controller" probeResult="failure" output=< Dec 05 11:07:15 crc kubenswrapper[5014]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 11:07:15 crc kubenswrapper[5014]: > Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.172052 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-853a-account-create-update-dk6wb"] Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.178202 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nvv5\" (UniqueName: \"kubernetes.io/projected/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-kube-api-access-2nvv5\") pod \"placement-853a-account-create-update-dk6wb\" (UID: \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\") " pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.178389 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44m8n\" (UniqueName: \"kubernetes.io/projected/908fbcc8-261b-40a5-9f70-b041c908b47e-kube-api-access-44m8n\") pod \"placement-db-create-lfvsl\" (UID: \"908fbcc8-261b-40a5-9f70-b041c908b47e\") " pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.178458 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/908fbcc8-261b-40a5-9f70-b041c908b47e-operator-scripts\") pod \"placement-db-create-lfvsl\" (UID: \"908fbcc8-261b-40a5-9f70-b041c908b47e\") " pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.178534 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-operator-scripts\") pod \"placement-853a-account-create-update-dk6wb\" (UID: \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\") " pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.179528 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/908fbcc8-261b-40a5-9f70-b041c908b47e-operator-scripts\") pod \"placement-db-create-lfvsl\" (UID: \"908fbcc8-261b-40a5-9f70-b041c908b47e\") " pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.185472 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.212109 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44m8n\" (UniqueName: \"kubernetes.io/projected/908fbcc8-261b-40a5-9f70-b041c908b47e-kube-api-access-44m8n\") pod \"placement-db-create-lfvsl\" (UID: \"908fbcc8-261b-40a5-9f70-b041c908b47e\") " pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.229595 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-zmtmw"] Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.231022 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.254569 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zmtmw"] Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.280538 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfz5g\" (UniqueName: \"kubernetes.io/projected/404eef16-b758-4079-bc0e-cf8c9a17ff11-kube-api-access-tfz5g\") pod \"glance-db-create-zmtmw\" (UID: \"404eef16-b758-4079-bc0e-cf8c9a17ff11\") " pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.280609 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nvv5\" (UniqueName: \"kubernetes.io/projected/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-kube-api-access-2nvv5\") pod \"placement-853a-account-create-update-dk6wb\" (UID: \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\") " pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.280750 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-operator-scripts\") pod \"placement-853a-account-create-update-dk6wb\" (UID: \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\") " pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.280789 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/404eef16-b758-4079-bc0e-cf8c9a17ff11-operator-scripts\") pod \"glance-db-create-zmtmw\" (UID: \"404eef16-b758-4079-bc0e-cf8c9a17ff11\") " pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.282169 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-operator-scripts\") pod \"placement-853a-account-create-update-dk6wb\" (UID: \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\") " pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.303160 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nvv5\" (UniqueName: \"kubernetes.io/projected/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-kube-api-access-2nvv5\") pod \"placement-853a-account-create-update-dk6wb\" (UID: \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\") " pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.305173 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-mfsjr" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.311201 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.391112 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-7cba-account-create-update-5s99p"] Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.396766 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/404eef16-b758-4079-bc0e-cf8c9a17ff11-operator-scripts\") pod \"glance-db-create-zmtmw\" (UID: \"404eef16-b758-4079-bc0e-cf8c9a17ff11\") " pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.394231 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/404eef16-b758-4079-bc0e-cf8c9a17ff11-operator-scripts\") pod \"glance-db-create-zmtmw\" (UID: \"404eef16-b758-4079-bc0e-cf8c9a17ff11\") " pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.404806 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfz5g\" (UniqueName: \"kubernetes.io/projected/404eef16-b758-4079-bc0e-cf8c9a17ff11-kube-api-access-tfz5g\") pod \"glance-db-create-zmtmw\" (UID: \"404eef16-b758-4079-bc0e-cf8c9a17ff11\") " pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.410091 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.463092 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.499218 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7cba-account-create-update-5s99p"] Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.508647 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njzfg\" (UniqueName: \"kubernetes.io/projected/9d05249d-225e-46be-9441-572d372fd7ab-kube-api-access-njzfg\") pod \"glance-7cba-account-create-update-5s99p\" (UID: \"9d05249d-225e-46be-9441-572d372fd7ab\") " pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.508837 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d05249d-225e-46be-9441-572d372fd7ab-operator-scripts\") pod \"glance-7cba-account-create-update-5s99p\" (UID: \"9d05249d-225e-46be-9441-572d372fd7ab\") " pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.519485 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfz5g\" (UniqueName: \"kubernetes.io/projected/404eef16-b758-4079-bc0e-cf8c9a17ff11-kube-api-access-tfz5g\") pod \"glance-db-create-zmtmw\" (UID: \"404eef16-b758-4079-bc0e-cf8c9a17ff11\") " pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.558040 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.574005 5014 util.go:30] "No sandbox for pod can be found. 
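
[annotation] "Caches populated for *v1.Secret ..." (reflector.go:368) marks the kubelet's reflector completing its initial LIST for a secret it now has to mount (keystone-db-secret, placement-db-secret, glance-db-secret above). The same list/watch machinery is what client-go exposes as informers; below is a sketch of watching Secrets in the openstack namespace. The go.mod dependencies on k8s.io/client-go and k8s.io/api, and the kubeconfig path, are assumptions.

    // secret_informer.go - observe secret cache population, informer-style.
    package main

    import (
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        "k8s.io/client-go/informers"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/cache"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        factory := informers.NewSharedInformerFactoryWithOptions(cs, 10*time.Minute,
            informers.WithNamespace("openstack"))
        inf := factory.Core().V1().Secrets().Informer()
        inf.AddEventHandler(cache.ResourceEventHandlerFuncs{
            AddFunc: func(obj interface{}) {
                s := obj.(*corev1.Secret)
                fmt.Printf("cache add: %s/%s\n", s.Namespace, s.Name)
            },
        })
        stop := make(chan struct{})
        defer close(stop)
        factory.Start(stop)
        // "Caches populated" corresponds to this initial sync completing.
        cache.WaitForCacheSync(stop, inf.HasSynced)
        fmt.Println("secret cache synced")
        select {}
    }
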
Need to start a new one" pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.612816 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njzfg\" (UniqueName: \"kubernetes.io/projected/9d05249d-225e-46be-9441-572d372fd7ab-kube-api-access-njzfg\") pod \"glance-7cba-account-create-update-5s99p\" (UID: \"9d05249d-225e-46be-9441-572d372fd7ab\") " pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.613003 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d05249d-225e-46be-9441-572d372fd7ab-operator-scripts\") pod \"glance-7cba-account-create-update-5s99p\" (UID: \"9d05249d-225e-46be-9441-572d372fd7ab\") " pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.613964 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d05249d-225e-46be-9441-572d372fd7ab-operator-scripts\") pod \"glance-7cba-account-create-update-5s99p\" (UID: \"9d05249d-225e-46be-9441-572d372fd7ab\") " pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.674156 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njzfg\" (UniqueName: \"kubernetes.io/projected/9d05249d-225e-46be-9441-572d372fd7ab-kube-api-access-njzfg\") pod \"glance-7cba-account-create-update-5s99p\" (UID: \"9d05249d-225e-46be-9441-572d372fd7ab\") " pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.724151 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-6wk9t-config-wkj5n"] Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.727877 5014 util.go:30] "No sandbox for pod can be found. 
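
[annotation] Every pod in this window mounts a kube-api-access-<random> projected volume alongside its operator-scripts ConfigMap: the bound service-account token plus kube-root-ca.crt and the namespace file. A sketch of the equivalent corev1 volume spec; the 3607-second token expiry is the usual default, assumed rather than read from this log.

    // kube_api_access.go - the projected volume behind kube-api-access-* mounts.
    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        expiry := int64(3607) // assumed default bound-token lifetime
        vol := corev1.Volume{
            Name: "kube-api-access-njzfg", // name from the log; suffix is random
            VolumeSource: corev1.VolumeSource{
                Projected: &corev1.ProjectedVolumeSource{
                    Sources: []corev1.VolumeProjection{
                        {ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
                            Path: "token", ExpirationSeconds: &expiry}},
                        {ConfigMap: &corev1.ConfigMapProjection{
                            LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
                            Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}}}},
                        {DownwardAPI: &corev1.DownwardAPIProjection{
                            Items: []corev1.DownwardAPIVolumeFile{{
                                Path:     "namespace",
                                FieldRef: &corev1.ObjectFieldSelector{FieldPath: "metadata.namespace"}}}}},
                    },
                },
            },
        }
        fmt.Printf("%+v\n", vol)
    }
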
Need to start a new one" pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.733343 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.771207 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6wk9t-config-wkj5n"] Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.840796 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-log-ovn\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.840895 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-scripts\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.840971 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run-ovn\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.841016 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.841068 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-additional-scripts\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.841104 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm2f7\" (UniqueName: \"kubernetes.io/projected/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-kube-api-access-wm2f7\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.899453 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.943324 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-scripts\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.943445 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run-ovn\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.943489 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.943520 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-additional-scripts\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.943555 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm2f7\" (UniqueName: \"kubernetes.io/projected/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-kube-api-access-wm2f7\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.943628 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-log-ovn\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.944135 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-log-ovn\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.950489 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.950617 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run-ovn\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " 
pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.951420 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-additional-scripts\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:15 crc kubenswrapper[5014]: I1205 11:07:15.971832 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-scripts\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:16 crc kubenswrapper[5014]: I1205 11:07:16.041144 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm2f7\" (UniqueName: \"kubernetes.io/projected/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-kube-api-access-wm2f7\") pod \"ovn-controller-6wk9t-config-wkj5n\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:16 crc kubenswrapper[5014]: I1205 11:07:16.132029 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:16 crc kubenswrapper[5014]: I1205 11:07:16.270173 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-58ab-account-create-update-m6mpt"] Dec 05 11:07:16 crc kubenswrapper[5014]: I1205 11:07:16.279700 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nj5xc"] Dec 05 11:07:16 crc kubenswrapper[5014]: I1205 11:07:16.603237 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zmtmw"] Dec 05 11:07:16 crc kubenswrapper[5014]: I1205 11:07:16.711320 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6wk9t-config-wkj5n"] Dec 05 11:07:16 crc kubenswrapper[5014]: I1205 11:07:16.722217 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-853a-account-create-update-dk6wb"] Dec 05 11:07:16 crc kubenswrapper[5014]: I1205 11:07:16.728657 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lfvsl"] Dec 05 11:07:16 crc kubenswrapper[5014]: W1205 11:07:16.732432 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod908fbcc8_261b_40a5_9f70_b041c908b47e.slice/crio-7826677093c4044e6dbb00471d357b85051b614bbdc57e7d2fe36e2a2ed0d695 WatchSource:0}: Error finding container 7826677093c4044e6dbb00471d357b85051b614bbdc57e7d2fe36e2a2ed0d695: Status 404 returned error can't find the container with id 7826677093c4044e6dbb00471d357b85051b614bbdc57e7d2fe36e2a2ed0d695 Dec 05 11:07:16 crc kubenswrapper[5014]: W1205 11:07:16.735315 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc46d6d28_4203_4a25_bcf5_b0c8325ee4ab.slice/crio-42fc99a97bbd48f470fbd6735fc782db278d46c856ba0761b61d2fb4259ca7b5 WatchSource:0}: Error finding container 42fc99a97bbd48f470fbd6735fc782db278d46c856ba0761b61d2fb4259ca7b5: Status 404 returned error can't find the container with id 42fc99a97bbd48f470fbd6735fc782db278d46c856ba0761b61d2fb4259ca7b5 Dec 05 11:07:16 crc kubenswrapper[5014]: W1205 
11:07:16.736148 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod727cdfb6_ffb7_4e5f_9aed_b856d87bb80e.slice/crio-a353253476952d7a7f038c8a14583adae3a9a894ff200b9fc08f2d73dfc71b56 WatchSource:0}: Error finding container a353253476952d7a7f038c8a14583adae3a9a894ff200b9fc08f2d73dfc71b56: Status 404 returned error can't find the container with id a353253476952d7a7f038c8a14583adae3a9a894ff200b9fc08f2d73dfc71b56 Dec 05 11:07:16 crc kubenswrapper[5014]: I1205 11:07:16.820254 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7cba-account-create-update-5s99p"] Dec 05 11:07:16 crc kubenswrapper[5014]: W1205 11:07:16.843753 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d05249d_225e_46be_9441_572d372fd7ab.slice/crio-a24c6f1d75aaf02b65b6f835dfdffe6a8d4fd07c893aceec7c720d21ae53655e WatchSource:0}: Error finding container a24c6f1d75aaf02b65b6f835dfdffe6a8d4fd07c893aceec7c720d21ae53655e: Status 404 returned error can't find the container with id a24c6f1d75aaf02b65b6f835dfdffe6a8d4fd07c893aceec7c720d21ae53655e Dec 05 11:07:17 crc kubenswrapper[5014]: I1205 11:07:17.062908 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-58ab-account-create-update-m6mpt" event={"ID":"dbef5fef-ac9d-4c62-af8e-9956ce64a080","Type":"ContainerStarted","Data":"66220d0985e7c5ac91d9634fbdf7452aede698fa0d0c8b475b713f49f92bb073"} Dec 05 11:07:17 crc kubenswrapper[5014]: I1205 11:07:17.064220 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7cba-account-create-update-5s99p" event={"ID":"9d05249d-225e-46be-9441-572d372fd7ab","Type":"ContainerStarted","Data":"a24c6f1d75aaf02b65b6f835dfdffe6a8d4fd07c893aceec7c720d21ae53655e"} Dec 05 11:07:17 crc kubenswrapper[5014]: I1205 11:07:17.065540 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lfvsl" event={"ID":"908fbcc8-261b-40a5-9f70-b041c908b47e","Type":"ContainerStarted","Data":"7826677093c4044e6dbb00471d357b85051b614bbdc57e7d2fe36e2a2ed0d695"} Dec 05 11:07:17 crc kubenswrapper[5014]: I1205 11:07:17.066733 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6wk9t-config-wkj5n" event={"ID":"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab","Type":"ContainerStarted","Data":"42fc99a97bbd48f470fbd6735fc782db278d46c856ba0761b61d2fb4259ca7b5"} Dec 05 11:07:17 crc kubenswrapper[5014]: I1205 11:07:17.067997 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-853a-account-create-update-dk6wb" event={"ID":"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e","Type":"ContainerStarted","Data":"a353253476952d7a7f038c8a14583adae3a9a894ff200b9fc08f2d73dfc71b56"} Dec 05 11:07:17 crc kubenswrapper[5014]: I1205 11:07:17.069492 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nj5xc" event={"ID":"bef9ec2f-df6e-4409-8e88-8ae5895faa24","Type":"ContainerStarted","Data":"c6c225ec5ea5ec2ef4ceb133798db188654fc096912ee06839d8ebcca1b69f6b"} Dec 05 11:07:17 crc kubenswrapper[5014]: I1205 11:07:17.071078 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zmtmw" event={"ID":"404eef16-b758-4079-bc0e-cf8c9a17ff11","Type":"ContainerStarted","Data":"99229566fe8ed40cf2c56a342ee73d91f1362a3511a6f4649896251667888f4d"} Dec 05 11:07:19 crc kubenswrapper[5014]: E1205 11:07:19.685698 5014 cadvisor_stats_provider.go:516] "Partial 
failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e9300c9_3a44_43c1_bbe7_d0959a35eee1.slice/crio-conmon-f439f7814c3c9edd6130fc7a0818b8bbbf080786b75946951491dae70a49d44d.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.098674 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-6wk9t" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.099693 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lfvsl" event={"ID":"908fbcc8-261b-40a5-9f70-b041c908b47e","Type":"ContainerStarted","Data":"f8dbfa425be9978604e0fd4ec68f93736423a7f9293fba8e89a8d2aecfd9b3f7"} Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.101230 5014 generic.go:334] "Generic (PLEG): container finished" podID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerID="1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d" exitCode=0 Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.101315 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"909c5067-f4b6-4303-98e0-7f0763da52f9","Type":"ContainerDied","Data":"1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d"} Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.106957 5014 generic.go:334] "Generic (PLEG): container finished" podID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerID="f439f7814c3c9edd6130fc7a0818b8bbbf080786b75946951491dae70a49d44d" exitCode=0 Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.106984 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9e9300c9-3a44-43c1-bbe7-d0959a35eee1","Type":"ContainerDied","Data":"f439f7814c3c9edd6130fc7a0818b8bbbf080786b75946951491dae70a49d44d"} Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.109605 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nj5xc" event={"ID":"bef9ec2f-df6e-4409-8e88-8ae5895faa24","Type":"ContainerStarted","Data":"7bff38f0f38a342547c3fe2deb6daa0f4d60f0e6fc928375e15778085441c09a"} Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.110989 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zmtmw" event={"ID":"404eef16-b758-4079-bc0e-cf8c9a17ff11","Type":"ContainerStarted","Data":"b09ba23a8f3d79286c29fab2eb193acc70aafc4f6974ea83bfcf6c0554fd2bd4"} Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.112954 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-58ab-account-create-update-m6mpt" event={"ID":"dbef5fef-ac9d-4c62-af8e-9956ce64a080","Type":"ContainerStarted","Data":"31caa4e1064501a178e9c926ae479f09dd1bff42e68ba2f6f6865d0fae3ba4a0"} Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.119451 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7cba-account-create-update-5s99p" event={"ID":"9d05249d-225e-46be-9441-572d372fd7ab","Type":"ContainerStarted","Data":"22c6d0c3a24d20e99969a2e8c1ef5b363a9c9588a33efdd405df8faf75eb6e1c"} Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.141207 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6wk9t-config-wkj5n" event={"ID":"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab","Type":"ContainerStarted","Data":"4a31852627b25d3b366e0362681bacb7705b8cfb0d21ebdbb2a2618984bbb033"} Dec 05 11:07:20 crc kubenswrapper[5014]: 
I1205 11:07:20.145688 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-853a-account-create-update-dk6wb" event={"ID":"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e","Type":"ContainerStarted","Data":"de8e96d8b6dc3949c7215642d69644721be24ab154dba0c113e3e41dc051ebfd"} Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.152887 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-nj5xc" podStartSLOduration=6.152867014 podStartE2EDuration="6.152867014s" podCreationTimestamp="2025-12-05 11:07:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:20.146598882 +0000 UTC m=+1167.094716596" watchObservedRunningTime="2025-12-05 11:07:20.152867014 +0000 UTC m=+1167.100984718" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.180039 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-zmtmw" podStartSLOduration=5.180011782 podStartE2EDuration="5.180011782s" podCreationTimestamp="2025-12-05 11:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:20.172752376 +0000 UTC m=+1167.120870080" watchObservedRunningTime="2025-12-05 11:07:20.180011782 +0000 UTC m=+1167.128129506" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.288398 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.289861 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-58ab-account-create-update-m6mpt" podStartSLOduration=6.289827255 podStartE2EDuration="6.289827255s" podCreationTimestamp="2025-12-05 11:07:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:20.268485207 +0000 UTC m=+1167.216602931" watchObservedRunningTime="2025-12-05 11:07:20.289827255 +0000 UTC m=+1167.237944959" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.297625 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/944ccaf8-60a1-4574-8dec-60c5c7ea3dcf-etc-swift\") pod \"swift-storage-0\" (UID: \"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf\") " pod="openstack/swift-storage-0" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.297737 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-lfvsl" podStartSLOduration=6.297721466 podStartE2EDuration="6.297721466s" podCreationTimestamp="2025-12-05 11:07:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:20.28924795 +0000 UTC m=+1167.237365664" watchObservedRunningTime="2025-12-05 11:07:20.297721466 +0000 UTC m=+1167.245839160" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.314611 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-7cba-account-create-update-5s99p" podStartSLOduration=5.314582425 podStartE2EDuration="5.314582425s" 
podCreationTimestamp="2025-12-05 11:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:20.305824793 +0000 UTC m=+1167.253942497" watchObservedRunningTime="2025-12-05 11:07:20.314582425 +0000 UTC m=+1167.262700129" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.340381 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-6wk9t-config-wkj5n" podStartSLOduration=5.34035905 podStartE2EDuration="5.34035905s" podCreationTimestamp="2025-12-05 11:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:20.329430325 +0000 UTC m=+1167.277548029" watchObservedRunningTime="2025-12-05 11:07:20.34035905 +0000 UTC m=+1167.288476754" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.357505 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-853a-account-create-update-dk6wb" podStartSLOduration=5.357471505 podStartE2EDuration="5.357471505s" podCreationTimestamp="2025-12-05 11:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:20.346299214 +0000 UTC m=+1167.294416918" watchObservedRunningTime="2025-12-05 11:07:20.357471505 +0000 UTC m=+1167.305589209" Dec 05 11:07:20 crc kubenswrapper[5014]: I1205 11:07:20.415190 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.109447 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 11:07:21 crc kubenswrapper[5014]: W1205 11:07:21.270535 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod944ccaf8_60a1_4574_8dec_60c5c7ea3dcf.slice/crio-e260e27b31b03a6931b91ab0a0bcd2021d9b9ce8e97ccbd98b6abdfb71db2495 WatchSource:0}: Error finding container e260e27b31b03a6931b91ab0a0bcd2021d9b9ce8e97ccbd98b6abdfb71db2495: Status 404 returned error can't find the container with id e260e27b31b03a6931b91ab0a0bcd2021d9b9ce8e97ccbd98b6abdfb71db2495 Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.279877 5014 generic.go:334] "Generic (PLEG): container finished" podID="c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" containerID="4a31852627b25d3b366e0362681bacb7705b8cfb0d21ebdbb2a2618984bbb033" exitCode=0 Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.280756 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6wk9t-config-wkj5n" event={"ID":"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab","Type":"ContainerDied","Data":"4a31852627b25d3b366e0362681bacb7705b8cfb0d21ebdbb2a2618984bbb033"} Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.284502 5014 generic.go:334] "Generic (PLEG): container finished" podID="bef9ec2f-df6e-4409-8e88-8ae5895faa24" containerID="7bff38f0f38a342547c3fe2deb6daa0f4d60f0e6fc928375e15778085441c09a" exitCode=0 Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.284724 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nj5xc" event={"ID":"bef9ec2f-df6e-4409-8e88-8ae5895faa24","Type":"ContainerDied","Data":"7bff38f0f38a342547c3fe2deb6daa0f4d60f0e6fc928375e15778085441c09a"} Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.287686 5014 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9e9300c9-3a44-43c1-bbe7-d0959a35eee1","Type":"ContainerStarted","Data":"9bf1e6076b04e4d05beeb349bbbbc834695a23b69f62d4d33127bd8770b6280c"} Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.287917 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.289542 5014 generic.go:334] "Generic (PLEG): container finished" podID="404eef16-b758-4079-bc0e-cf8c9a17ff11" containerID="b09ba23a8f3d79286c29fab2eb193acc70aafc4f6974ea83bfcf6c0554fd2bd4" exitCode=0 Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.289601 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zmtmw" event={"ID":"404eef16-b758-4079-bc0e-cf8c9a17ff11","Type":"ContainerDied","Data":"b09ba23a8f3d79286c29fab2eb193acc70aafc4f6974ea83bfcf6c0554fd2bd4"} Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.291348 5014 generic.go:334] "Generic (PLEG): container finished" podID="908fbcc8-261b-40a5-9f70-b041c908b47e" containerID="f8dbfa425be9978604e0fd4ec68f93736423a7f9293fba8e89a8d2aecfd9b3f7" exitCode=0 Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.291403 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lfvsl" event={"ID":"908fbcc8-261b-40a5-9f70-b041c908b47e","Type":"ContainerDied","Data":"f8dbfa425be9978604e0fd4ec68f93736423a7f9293fba8e89a8d2aecfd9b3f7"} Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.294036 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"909c5067-f4b6-4303-98e0-7f0763da52f9","Type":"ContainerStarted","Data":"a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617"} Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.294872 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.359543 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.794404513 podStartE2EDuration="1m21.359523131s" podCreationTimestamp="2025-12-05 11:06:00 +0000 UTC" firstStartedPulling="2025-12-05 11:06:02.52598455 +0000 UTC m=+1089.474102254" lastFinishedPulling="2025-12-05 11:06:44.091103168 +0000 UTC m=+1131.039220872" observedRunningTime="2025-12-05 11:07:21.357927962 +0000 UTC m=+1168.306045676" watchObservedRunningTime="2025-12-05 11:07:21.359523131 +0000 UTC m=+1168.307640835" Dec 05 11:07:21 crc kubenswrapper[5014]: I1205 11:07:21.419262 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=39.61141987 podStartE2EDuration="1m21.419246139s" podCreationTimestamp="2025-12-05 11:06:00 +0000 UTC" firstStartedPulling="2025-12-05 11:06:02.27509733 +0000 UTC m=+1089.223215034" lastFinishedPulling="2025-12-05 11:06:44.082923589 +0000 UTC m=+1131.031041303" observedRunningTime="2025-12-05 11:07:21.39538628 +0000 UTC m=+1168.343503994" watchObservedRunningTime="2025-12-05 11:07:21.419246139 +0000 UTC m=+1168.367363843" Dec 05 11:07:22 crc kubenswrapper[5014]: I1205 11:07:22.304303 5014 generic.go:334] "Generic (PLEG): container finished" podID="727cdfb6-ffb7-4e5f-9aed-b856d87bb80e" containerID="de8e96d8b6dc3949c7215642d69644721be24ab154dba0c113e3e41dc051ebfd" exitCode=0 Dec 05 11:07:22 crc kubenswrapper[5014]: I1205 
11:07:22.304393 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-853a-account-create-update-dk6wb" event={"ID":"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e","Type":"ContainerDied","Data":"de8e96d8b6dc3949c7215642d69644721be24ab154dba0c113e3e41dc051ebfd"} Dec 05 11:07:22 crc kubenswrapper[5014]: I1205 11:07:22.305982 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"e260e27b31b03a6931b91ab0a0bcd2021d9b9ce8e97ccbd98b6abdfb71db2495"} Dec 05 11:07:22 crc kubenswrapper[5014]: I1205 11:07:22.307410 5014 generic.go:334] "Generic (PLEG): container finished" podID="dbef5fef-ac9d-4c62-af8e-9956ce64a080" containerID="31caa4e1064501a178e9c926ae479f09dd1bff42e68ba2f6f6865d0fae3ba4a0" exitCode=0 Dec 05 11:07:22 crc kubenswrapper[5014]: I1205 11:07:22.307479 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-58ab-account-create-update-m6mpt" event={"ID":"dbef5fef-ac9d-4c62-af8e-9956ce64a080","Type":"ContainerDied","Data":"31caa4e1064501a178e9c926ae479f09dd1bff42e68ba2f6f6865d0fae3ba4a0"} Dec 05 11:07:22 crc kubenswrapper[5014]: I1205 11:07:22.308718 5014 generic.go:334] "Generic (PLEG): container finished" podID="9d05249d-225e-46be-9441-572d372fd7ab" containerID="22c6d0c3a24d20e99969a2e8c1ef5b363a9c9588a33efdd405df8faf75eb6e1c" exitCode=0 Dec 05 11:07:22 crc kubenswrapper[5014]: I1205 11:07:22.308830 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7cba-account-create-update-5s99p" event={"ID":"9d05249d-225e-46be-9441-572d372fd7ab","Type":"ContainerDied","Data":"22c6d0c3a24d20e99969a2e8c1ef5b363a9c9588a33efdd405df8faf75eb6e1c"} Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.497885 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.507293 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.515642 5014 util.go:48] "No ready sandbox for pod can be found. 
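
[annotation] The generic.go:334 records above are PLEG noticing containers exit: the one-shot db-create and account-create jobs finish with exitCode=0 and are followed by ContainerDied events, after which teardown (unmount, volume detach) begins below. A sketch that surfaces exit codes from such records and flags non-zero ones:

    // pleg_exits.go - extract "container finished" exit codes from PLEG records.
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    var re = regexp.MustCompile(`"Generic \(PLEG\): container finished" podID="([^"]+)" containerID="([0-9a-f]+)" exitCode=(-?\d+)`)

    func main() {
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
        for sc.Scan() {
            for _, m := range re.FindAllStringSubmatch(sc.Text(), -1) {
                mark := " "
                if m[3] != "0" {
                    mark = "!" // non-zero exit worth a closer look
                }
                fmt.Printf("%s pod=%s container=%s exit=%s\n", mark, m[1], m[2][:12], m[3])
            }
        }
    }
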
Need to start a new one" pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.527642 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run-ovn\") pod \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.527699 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-scripts\") pod \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.527737 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-log-ovn\") pod \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.527816 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bef9ec2f-df6e-4409-8e88-8ae5895faa24-operator-scripts\") pod \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\" (UID: \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.527845 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-additional-scripts\") pod \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.527885 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfz5g\" (UniqueName: \"kubernetes.io/projected/404eef16-b758-4079-bc0e-cf8c9a17ff11-kube-api-access-tfz5g\") pod \"404eef16-b758-4079-bc0e-cf8c9a17ff11\" (UID: \"404eef16-b758-4079-bc0e-cf8c9a17ff11\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.527961 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run\") pod \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.527990 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brzq9\" (UniqueName: \"kubernetes.io/projected/bef9ec2f-df6e-4409-8e88-8ae5895faa24-kube-api-access-brzq9\") pod \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\" (UID: \"bef9ec2f-df6e-4409-8e88-8ae5895faa24\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.528037 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/404eef16-b758-4079-bc0e-cf8c9a17ff11-operator-scripts\") pod \"404eef16-b758-4079-bc0e-cf8c9a17ff11\" (UID: \"404eef16-b758-4079-bc0e-cf8c9a17ff11\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.528084 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wm2f7\" (UniqueName: \"kubernetes.io/projected/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-kube-api-access-wm2f7\") pod 
\"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\" (UID: \"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.529515 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" (UID: "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.529816 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" (UID: "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.530464 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-scripts" (OuterVolumeSpecName: "scripts") pod "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" (UID: "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.530502 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" (UID: "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.530860 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bef9ec2f-df6e-4409-8e88-8ae5895faa24-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bef9ec2f-df6e-4409-8e88-8ae5895faa24" (UID: "bef9ec2f-df6e-4409-8e88-8ae5895faa24"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.530900 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run" (OuterVolumeSpecName: "var-run") pod "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" (UID: "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.538206 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/404eef16-b758-4079-bc0e-cf8c9a17ff11-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "404eef16-b758-4079-bc0e-cf8c9a17ff11" (UID: "404eef16-b758-4079-bc0e-cf8c9a17ff11"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.539433 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bef9ec2f-df6e-4409-8e88-8ae5895faa24-kube-api-access-brzq9" (OuterVolumeSpecName: "kube-api-access-brzq9") pod "bef9ec2f-df6e-4409-8e88-8ae5895faa24" (UID: "bef9ec2f-df6e-4409-8e88-8ae5895faa24"). 
InnerVolumeSpecName "kube-api-access-brzq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.541873 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/404eef16-b758-4079-bc0e-cf8c9a17ff11-kube-api-access-tfz5g" (OuterVolumeSpecName: "kube-api-access-tfz5g") pod "404eef16-b758-4079-bc0e-cf8c9a17ff11" (UID: "404eef16-b758-4079-bc0e-cf8c9a17ff11"). InnerVolumeSpecName "kube-api-access-tfz5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.548428 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.558452 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-kube-api-access-wm2f7" (OuterVolumeSpecName: "kube-api-access-wm2f7") pod "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" (UID: "c46d6d28-4203-4a25-bcf5-b0c8325ee4ab"). InnerVolumeSpecName "kube-api-access-wm2f7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.645665 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44m8n\" (UniqueName: \"kubernetes.io/projected/908fbcc8-261b-40a5-9f70-b041c908b47e-kube-api-access-44m8n\") pod \"908fbcc8-261b-40a5-9f70-b041c908b47e\" (UID: \"908fbcc8-261b-40a5-9f70-b041c908b47e\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.645893 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/908fbcc8-261b-40a5-9f70-b041c908b47e-operator-scripts\") pod \"908fbcc8-261b-40a5-9f70-b041c908b47e\" (UID: \"908fbcc8-261b-40a5-9f70-b041c908b47e\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.648659 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/908fbcc8-261b-40a5-9f70-b041c908b47e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "908fbcc8-261b-40a5-9f70-b041c908b47e" (UID: "908fbcc8-261b-40a5-9f70-b041c908b47e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649755 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bef9ec2f-df6e-4409-8e88-8ae5895faa24-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649792 5014 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649806 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfz5g\" (UniqueName: \"kubernetes.io/projected/404eef16-b758-4079-bc0e-cf8c9a17ff11-kube-api-access-tfz5g\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649823 5014 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649840 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brzq9\" (UniqueName: \"kubernetes.io/projected/bef9ec2f-df6e-4409-8e88-8ae5895faa24-kube-api-access-brzq9\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649851 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/404eef16-b758-4079-bc0e-cf8c9a17ff11-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649862 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wm2f7\" (UniqueName: \"kubernetes.io/projected/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-kube-api-access-wm2f7\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649873 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/908fbcc8-261b-40a5-9f70-b041c908b47e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649883 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649892 5014 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.649902 5014 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.668264 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/908fbcc8-261b-40a5-9f70-b041c908b47e-kube-api-access-44m8n" (OuterVolumeSpecName: "kube-api-access-44m8n") pod "908fbcc8-261b-40a5-9f70-b041c908b47e" (UID: "908fbcc8-261b-40a5-9f70-b041c908b47e"). InnerVolumeSpecName "kube-api-access-44m8n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.751367 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44m8n\" (UniqueName: \"kubernetes.io/projected/908fbcc8-261b-40a5-9f70-b041c908b47e-kube-api-access-44m8n\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.785020 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.815100 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.852069 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nvv5\" (UniqueName: \"kubernetes.io/projected/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-kube-api-access-2nvv5\") pod \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\" (UID: \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.852204 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbef5fef-ac9d-4c62-af8e-9956ce64a080-operator-scripts\") pod \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\" (UID: \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.852228 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lszph\" (UniqueName: \"kubernetes.io/projected/dbef5fef-ac9d-4c62-af8e-9956ce64a080-kube-api-access-lszph\") pod \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\" (UID: \"dbef5fef-ac9d-4c62-af8e-9956ce64a080\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.852327 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-operator-scripts\") pod \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\" (UID: \"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.852677 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbef5fef-ac9d-4c62-af8e-9956ce64a080-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dbef5fef-ac9d-4c62-af8e-9956ce64a080" (UID: "dbef5fef-ac9d-4c62-af8e-9956ce64a080"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.853065 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "727cdfb6-ffb7-4e5f-9aed-b856d87bb80e" (UID: "727cdfb6-ffb7-4e5f-9aed-b856d87bb80e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.853238 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.856703 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbef5fef-ac9d-4c62-af8e-9956ce64a080-kube-api-access-lszph" (OuterVolumeSpecName: "kube-api-access-lszph") pod "dbef5fef-ac9d-4c62-af8e-9956ce64a080" (UID: "dbef5fef-ac9d-4c62-af8e-9956ce64a080"). InnerVolumeSpecName "kube-api-access-lszph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.864758 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-kube-api-access-2nvv5" (OuterVolumeSpecName: "kube-api-access-2nvv5") pod "727cdfb6-ffb7-4e5f-9aed-b856d87bb80e" (UID: "727cdfb6-ffb7-4e5f-9aed-b856d87bb80e"). InnerVolumeSpecName "kube-api-access-2nvv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.953720 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d05249d-225e-46be-9441-572d372fd7ab-operator-scripts\") pod \"9d05249d-225e-46be-9441-572d372fd7ab\" (UID: \"9d05249d-225e-46be-9441-572d372fd7ab\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.954107 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njzfg\" (UniqueName: \"kubernetes.io/projected/9d05249d-225e-46be-9441-572d372fd7ab-kube-api-access-njzfg\") pod \"9d05249d-225e-46be-9441-572d372fd7ab\" (UID: \"9d05249d-225e-46be-9441-572d372fd7ab\") " Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.954570 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbef5fef-ac9d-4c62-af8e-9956ce64a080-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.954596 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lszph\" (UniqueName: \"kubernetes.io/projected/dbef5fef-ac9d-4c62-af8e-9956ce64a080-kube-api-access-lszph\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.954611 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.954624 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nvv5\" (UniqueName: \"kubernetes.io/projected/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e-kube-api-access-2nvv5\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.954740 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d05249d-225e-46be-9441-572d372fd7ab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9d05249d-225e-46be-9441-572d372fd7ab" (UID: "9d05249d-225e-46be-9441-572d372fd7ab"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:23 crc kubenswrapper[5014]: I1205 11:07:23.956804 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d05249d-225e-46be-9441-572d372fd7ab-kube-api-access-njzfg" (OuterVolumeSpecName: "kube-api-access-njzfg") pod "9d05249d-225e-46be-9441-572d372fd7ab" (UID: "9d05249d-225e-46be-9441-572d372fd7ab"). InnerVolumeSpecName "kube-api-access-njzfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.057943 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d05249d-225e-46be-9441-572d372fd7ab-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.058046 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njzfg\" (UniqueName: \"kubernetes.io/projected/9d05249d-225e-46be-9441-572d372fd7ab-kube-api-access-njzfg\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.328866 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"52197c5701028547979453f022f4b62c452ecb41289644987e44015033db8ea7"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.328946 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"2e3db600565ef606a1185bb446db8a42b2505f27d2e0facb05050b749f2f5d50"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.328960 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"9131b03c838a93fc2bf31721e4e59d9d72d799402d2555646ea6b28c785dafd2"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.340602 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-58ab-account-create-update-m6mpt" event={"ID":"dbef5fef-ac9d-4c62-af8e-9956ce64a080","Type":"ContainerDied","Data":"66220d0985e7c5ac91d9634fbdf7452aede698fa0d0c8b475b713f49f92bb073"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.340658 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66220d0985e7c5ac91d9634fbdf7452aede698fa0d0c8b475b713f49f92bb073" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.340762 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-58ab-account-create-update-m6mpt" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.354729 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7cba-account-create-update-5s99p" event={"ID":"9d05249d-225e-46be-9441-572d372fd7ab","Type":"ContainerDied","Data":"a24c6f1d75aaf02b65b6f835dfdffe6a8d4fd07c893aceec7c720d21ae53655e"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.354786 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a24c6f1d75aaf02b65b6f835dfdffe6a8d4fd07c893aceec7c720d21ae53655e" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.354876 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-7cba-account-create-update-5s99p" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.360968 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lfvsl" event={"ID":"908fbcc8-261b-40a5-9f70-b041c908b47e","Type":"ContainerDied","Data":"7826677093c4044e6dbb00471d357b85051b614bbdc57e7d2fe36e2a2ed0d695"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.361012 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7826677093c4044e6dbb00471d357b85051b614bbdc57e7d2fe36e2a2ed0d695" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.361071 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lfvsl" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.369162 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6wk9t-config-wkj5n" event={"ID":"c46d6d28-4203-4a25-bcf5-b0c8325ee4ab","Type":"ContainerDied","Data":"42fc99a97bbd48f470fbd6735fc782db278d46c856ba0761b61d2fb4259ca7b5"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.369233 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42fc99a97bbd48f470fbd6735fc782db278d46c856ba0761b61d2fb4259ca7b5" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.369342 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6wk9t-config-wkj5n" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.381636 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-853a-account-create-update-dk6wb" event={"ID":"727cdfb6-ffb7-4e5f-9aed-b856d87bb80e","Type":"ContainerDied","Data":"a353253476952d7a7f038c8a14583adae3a9a894ff200b9fc08f2d73dfc71b56"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.381682 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a353253476952d7a7f038c8a14583adae3a9a894ff200b9fc08f2d73dfc71b56" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.381782 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-853a-account-create-update-dk6wb" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.387644 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nj5xc" event={"ID":"bef9ec2f-df6e-4409-8e88-8ae5895faa24","Type":"ContainerDied","Data":"c6c225ec5ea5ec2ef4ceb133798db188654fc096912ee06839d8ebcca1b69f6b"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.387686 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6c225ec5ea5ec2ef4ceb133798db188654fc096912ee06839d8ebcca1b69f6b" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.387752 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-nj5xc" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.395949 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zmtmw" event={"ID":"404eef16-b758-4079-bc0e-cf8c9a17ff11","Type":"ContainerDied","Data":"99229566fe8ed40cf2c56a342ee73d91f1362a3511a6f4649896251667888f4d"} Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.395996 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99229566fe8ed40cf2c56a342ee73d91f1362a3511a6f4649896251667888f4d" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.396142 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zmtmw" Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.674900 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-6wk9t-config-wkj5n"] Dec 05 11:07:24 crc kubenswrapper[5014]: I1205 11:07:24.721449 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-6wk9t-config-wkj5n"] Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.338150 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" path="/var/lib/kubelet/pods/c46d6d28-4203-4a25-bcf5-b0c8325ee4ab/volumes" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.406293 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"17ca4de3640b520dbb1f85df56dd6fdcf88ee90e7cbf4d3548cf808309976785"} Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.555560 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-8hlb6"] Dec 05 11:07:25 crc kubenswrapper[5014]: E1205 11:07:25.555975 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="908fbcc8-261b-40a5-9f70-b041c908b47e" containerName="mariadb-database-create" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556001 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="908fbcc8-261b-40a5-9f70-b041c908b47e" containerName="mariadb-database-create" Dec 05 11:07:25 crc kubenswrapper[5014]: E1205 11:07:25.556017 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbef5fef-ac9d-4c62-af8e-9956ce64a080" containerName="mariadb-account-create-update" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556027 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbef5fef-ac9d-4c62-af8e-9956ce64a080" containerName="mariadb-account-create-update" Dec 05 11:07:25 crc kubenswrapper[5014]: E1205 11:07:25.556040 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="404eef16-b758-4079-bc0e-cf8c9a17ff11" containerName="mariadb-database-create" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556046 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="404eef16-b758-4079-bc0e-cf8c9a17ff11" containerName="mariadb-database-create" Dec 05 11:07:25 crc kubenswrapper[5014]: E1205 11:07:25.556071 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d05249d-225e-46be-9441-572d372fd7ab" containerName="mariadb-account-create-update" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556080 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d05249d-225e-46be-9441-572d372fd7ab" containerName="mariadb-account-create-update" Dec 05 11:07:25 crc kubenswrapper[5014]: E1205 
11:07:25.556103 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bef9ec2f-df6e-4409-8e88-8ae5895faa24" containerName="mariadb-database-create" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556113 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="bef9ec2f-df6e-4409-8e88-8ae5895faa24" containerName="mariadb-database-create" Dec 05 11:07:25 crc kubenswrapper[5014]: E1205 11:07:25.556126 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="727cdfb6-ffb7-4e5f-9aed-b856d87bb80e" containerName="mariadb-account-create-update" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556133 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="727cdfb6-ffb7-4e5f-9aed-b856d87bb80e" containerName="mariadb-account-create-update" Dec 05 11:07:25 crc kubenswrapper[5014]: E1205 11:07:25.556150 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" containerName="ovn-config" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556157 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" containerName="ovn-config" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556408 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbef5fef-ac9d-4c62-af8e-9956ce64a080" containerName="mariadb-account-create-update" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556432 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="908fbcc8-261b-40a5-9f70-b041c908b47e" containerName="mariadb-database-create" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556455 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="727cdfb6-ffb7-4e5f-9aed-b856d87bb80e" containerName="mariadb-account-create-update" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556466 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="c46d6d28-4203-4a25-bcf5-b0c8325ee4ab" containerName="ovn-config" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556484 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d05249d-225e-46be-9441-572d372fd7ab" containerName="mariadb-account-create-update" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556499 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="404eef16-b758-4079-bc0e-cf8c9a17ff11" containerName="mariadb-database-create" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.556517 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="bef9ec2f-df6e-4409-8e88-8ae5895faa24" containerName="mariadb-database-create" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.557201 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.560320 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-57g5s" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.560949 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.562552 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8hlb6"] Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.678416 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-combined-ca-bundle\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.678501 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hgwj\" (UniqueName: \"kubernetes.io/projected/9a59ccde-127a-4709-8ea1-efd59b48504f-kube-api-access-4hgwj\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.679104 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-db-sync-config-data\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.679128 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-config-data\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.780913 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-combined-ca-bundle\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.781039 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hgwj\" (UniqueName: \"kubernetes.io/projected/9a59ccde-127a-4709-8ea1-efd59b48504f-kube-api-access-4hgwj\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.781163 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-db-sync-config-data\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.781205 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-config-data\") pod 
\"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.788646 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-config-data\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.794117 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-combined-ca-bundle\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.808117 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-db-sync-config-data\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.825546 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hgwj\" (UniqueName: \"kubernetes.io/projected/9a59ccde-127a-4709-8ea1-efd59b48504f-kube-api-access-4hgwj\") pod \"glance-db-sync-8hlb6\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:25 crc kubenswrapper[5014]: I1205 11:07:25.881976 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:26 crc kubenswrapper[5014]: I1205 11:07:26.728303 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8hlb6"] Dec 05 11:07:26 crc kubenswrapper[5014]: W1205 11:07:26.731112 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a59ccde_127a_4709_8ea1_efd59b48504f.slice/crio-e85e912ef8c743a1e0f89774c5b014d89b1bc76e00bb8d18836d2dfa8e91ecb6 WatchSource:0}: Error finding container e85e912ef8c743a1e0f89774c5b014d89b1bc76e00bb8d18836d2dfa8e91ecb6: Status 404 returned error can't find the container with id e85e912ef8c743a1e0f89774c5b014d89b1bc76e00bb8d18836d2dfa8e91ecb6 Dec 05 11:07:27 crc kubenswrapper[5014]: I1205 11:07:27.438218 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"9557f8b67c4814d46fe5b57649fe6ccbc562261981eb155b4ed1291d30edecb5"} Dec 05 11:07:27 crc kubenswrapper[5014]: I1205 11:07:27.438539 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"b7d5880f656cd8fdce69096218cda7ad7888912ef37d54e14c2bc6c741c6ce2c"} Dec 05 11:07:27 crc kubenswrapper[5014]: I1205 11:07:27.438566 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"4f4c59a3afd5711263fbe1d6b5b5593e5dc47d1d138a0f3784c1bf10718c87cd"} Dec 05 11:07:27 crc kubenswrapper[5014]: I1205 11:07:27.438575 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"8a63b2c674b35459e3358d2fb75d27515153a343a618e01d512d699ad3460ec1"} Dec 05 11:07:27 crc kubenswrapper[5014]: I1205 11:07:27.439147 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8hlb6" event={"ID":"9a59ccde-127a-4709-8ea1-efd59b48504f","Type":"ContainerStarted","Data":"e85e912ef8c743a1e0f89774c5b014d89b1bc76e00bb8d18836d2dfa8e91ecb6"} Dec 05 11:07:29 crc kubenswrapper[5014]: I1205 11:07:29.501858 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"ed64b76aae07981fb7009c7acc52115439a279265e62c9cd16154498f4a4b7e5"} Dec 05 11:07:29 crc kubenswrapper[5014]: I1205 11:07:29.502486 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"4c1d4b27fd16062122c943fd30694cab9dda76c72fdaf740bd9aed854aa4ede8"} Dec 05 11:07:29 crc kubenswrapper[5014]: I1205 11:07:29.502502 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"70606604e82876aafd4045cda92120ebef09bba9c15bc2d125d0da6d4ffd680d"} Dec 05 11:07:29 crc kubenswrapper[5014]: I1205 11:07:29.502516 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"30f3d5903f4d6e74c9a9eb16eaa12b2e119ab3e525e1bec910792c1f36a34498"} Dec 05 11:07:30 crc kubenswrapper[5014]: I1205 11:07:30.539081 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"ba11a84b6b3d1ac05ec85f9e8139965157f29a2a07aab103ef045fbe1feb6a99"} Dec 05 11:07:31 crc kubenswrapper[5014]: I1205 11:07:31.719473 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.97:5671: connect: connection refused" Dec 05 11:07:32 crc kubenswrapper[5014]: I1205 11:07:32.037735 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused" Dec 05 11:07:32 crc kubenswrapper[5014]: I1205 11:07:32.567015 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"dd3631b57f979c0af77aafc8c74036d24384f49241479a3ed412ea43579a0931"} Dec 05 11:07:32 crc kubenswrapper[5014]: I1205 11:07:32.567062 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"944ccaf8-60a1-4574-8dec-60c5c7ea3dcf","Type":"ContainerStarted","Data":"4b93d98a13fa7d67939dce4b26a60f3c44d9475b0795136c9167f98fd784b660"} Dec 05 11:07:32 crc kubenswrapper[5014]: I1205 11:07:32.617154 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=38.319595758 podStartE2EDuration="45.617128956s" podCreationTimestamp="2025-12-05 11:06:47 +0000 UTC" firstStartedPulling="2025-12-05 11:07:21.273327641 +0000 UTC 
m=+1168.221445345" lastFinishedPulling="2025-12-05 11:07:28.570860829 +0000 UTC m=+1175.518978543" observedRunningTime="2025-12-05 11:07:32.603471974 +0000 UTC m=+1179.551589698" watchObservedRunningTime="2025-12-05 11:07:32.617128956 +0000 UTC m=+1179.565246670" Dec 05 11:07:32 crc kubenswrapper[5014]: I1205 11:07:32.871887 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-zjmn4"] Dec 05 11:07:32 crc kubenswrapper[5014]: I1205 11:07:32.873352 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:32 crc kubenswrapper[5014]: I1205 11:07:32.876107 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 05 11:07:32 crc kubenswrapper[5014]: I1205 11:07:32.904244 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-zjmn4"] Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.030689 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.030814 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g8zb\" (UniqueName: \"kubernetes.io/projected/8eb8cd04-c5a2-4a43-8648-80e74478ec75-kube-api-access-5g8zb\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.030907 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.030940 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-config\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.030955 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.030980 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.133082 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.133194 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g8zb\" (UniqueName: \"kubernetes.io/projected/8eb8cd04-c5a2-4a43-8648-80e74478ec75-kube-api-access-5g8zb\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.133265 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.133305 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-config\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.133328 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.134698 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.134698 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.134706 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-config\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.134955 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.134953 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-swift-storage-0\") pod 
\"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.136401 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.183335 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g8zb\" (UniqueName: \"kubernetes.io/projected/8eb8cd04-c5a2-4a43-8648-80e74478ec75-kube-api-access-5g8zb\") pod \"dnsmasq-dns-6d5b6d6b67-zjmn4\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:33 crc kubenswrapper[5014]: I1205 11:07:33.201201 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:40 crc kubenswrapper[5014]: I1205 11:07:40.439776 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-zjmn4"] Dec 05 11:07:40 crc kubenswrapper[5014]: I1205 11:07:40.629137 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" event={"ID":"8eb8cd04-c5a2-4a43-8648-80e74478ec75","Type":"ContainerStarted","Data":"13bcf29c66eb5e9bf7546f4326cc7dce83b29b8c6e0d5d2107645fd0c68f696a"} Dec 05 11:07:41 crc kubenswrapper[5014]: I1205 11:07:41.640234 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8hlb6" event={"ID":"9a59ccde-127a-4709-8ea1-efd59b48504f","Type":"ContainerStarted","Data":"277f825b0886af74e153dd6df8fe783d0e0b39885a7de3f0842d8caa6db93f9a"} Dec 05 11:07:41 crc kubenswrapper[5014]: I1205 11:07:41.644521 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" event={"ID":"8eb8cd04-c5a2-4a43-8648-80e74478ec75","Type":"ContainerDied","Data":"0f76d4a9503fcc7f1d5cdab4a8c2506f01d2045033378f935502e9b9823ad0cf"} Dec 05 11:07:41 crc kubenswrapper[5014]: I1205 11:07:41.645426 5014 generic.go:334] "Generic (PLEG): container finished" podID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerID="0f76d4a9503fcc7f1d5cdab4a8c2506f01d2045033378f935502e9b9823ad0cf" exitCode=0 Dec 05 11:07:41 crc kubenswrapper[5014]: I1205 11:07:41.675004 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-8hlb6" podStartSLOduration=2.958233226 podStartE2EDuration="16.674985955s" podCreationTimestamp="2025-12-05 11:07:25 +0000 UTC" firstStartedPulling="2025-12-05 11:07:26.739049135 +0000 UTC m=+1173.687166839" lastFinishedPulling="2025-12-05 11:07:40.455801864 +0000 UTC m=+1187.403919568" observedRunningTime="2025-12-05 11:07:41.67146872 +0000 UTC m=+1188.619586464" watchObservedRunningTime="2025-12-05 11:07:41.674985955 +0000 UTC m=+1188.623103659" Dec 05 11:07:41 crc kubenswrapper[5014]: I1205 11:07:41.719939 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.025249 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-dz8p5"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.027011 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.036459 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.050018 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-dz8p5"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.144235 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-nwrsm"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.147829 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.148952 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwwsv\" (UniqueName: \"kubernetes.io/projected/5b3c2731-7987-4449-bc49-71d8f679b8b6-kube-api-access-jwwsv\") pod \"cinder-db-create-dz8p5\" (UID: \"5b3c2731-7987-4449-bc49-71d8f679b8b6\") " pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.149021 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b3c2731-7987-4449-bc49-71d8f679b8b6-operator-scripts\") pod \"cinder-db-create-dz8p5\" (UID: \"5b3c2731-7987-4449-bc49-71d8f679b8b6\") " pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.165208 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-914c-account-create-update-7m424"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.166357 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.170885 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.188376 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nwrsm"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.226413 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-914c-account-create-update-7m424"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.251014 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9rt9\" (UniqueName: \"kubernetes.io/projected/5c290d7c-34a6-40f1-838d-379d5cee5319-kube-api-access-v9rt9\") pod \"barbican-db-create-nwrsm\" (UID: \"5c290d7c-34a6-40f1-838d-379d5cee5319\") " pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.251086 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwwsv\" (UniqueName: \"kubernetes.io/projected/5b3c2731-7987-4449-bc49-71d8f679b8b6-kube-api-access-jwwsv\") pod \"cinder-db-create-dz8p5\" (UID: \"5b3c2731-7987-4449-bc49-71d8f679b8b6\") " pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.251113 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b3c2731-7987-4449-bc49-71d8f679b8b6-operator-scripts\") pod \"cinder-db-create-dz8p5\" (UID: \"5b3c2731-7987-4449-bc49-71d8f679b8b6\") " pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.251155 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a613db58-0a2a-4960-a361-b02a32ed6713-operator-scripts\") pod \"barbican-914c-account-create-update-7m424\" (UID: \"a613db58-0a2a-4960-a361-b02a32ed6713\") " pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.251197 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c290d7c-34a6-40f1-838d-379d5cee5319-operator-scripts\") pod \"barbican-db-create-nwrsm\" (UID: \"5c290d7c-34a6-40f1-838d-379d5cee5319\") " pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.251214 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssbqd\" (UniqueName: \"kubernetes.io/projected/a613db58-0a2a-4960-a361-b02a32ed6713-kube-api-access-ssbqd\") pod \"barbican-914c-account-create-update-7m424\" (UID: \"a613db58-0a2a-4960-a361-b02a32ed6713\") " pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.252239 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b3c2731-7987-4449-bc49-71d8f679b8b6-operator-scripts\") pod \"cinder-db-create-dz8p5\" (UID: \"5b3c2731-7987-4449-bc49-71d8f679b8b6\") " pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.264331 5014 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-26ee-account-create-update-mnkx4"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.265881 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.268582 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.289849 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwwsv\" (UniqueName: \"kubernetes.io/projected/5b3c2731-7987-4449-bc49-71d8f679b8b6-kube-api-access-jwwsv\") pod \"cinder-db-create-dz8p5\" (UID: \"5b3c2731-7987-4449-bc49-71d8f679b8b6\") " pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.345599 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-26ee-account-create-update-mnkx4"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.345951 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.355907 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a613db58-0a2a-4960-a361-b02a32ed6713-operator-scripts\") pod \"barbican-914c-account-create-update-7m424\" (UID: \"a613db58-0a2a-4960-a361-b02a32ed6713\") " pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.356027 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c290d7c-34a6-40f1-838d-379d5cee5319-operator-scripts\") pod \"barbican-db-create-nwrsm\" (UID: \"5c290d7c-34a6-40f1-838d-379d5cee5319\") " pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.356060 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssbqd\" (UniqueName: \"kubernetes.io/projected/a613db58-0a2a-4960-a361-b02a32ed6713-kube-api-access-ssbqd\") pod \"barbican-914c-account-create-update-7m424\" (UID: \"a613db58-0a2a-4960-a361-b02a32ed6713\") " pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.356106 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9rt9\" (UniqueName: \"kubernetes.io/projected/5c290d7c-34a6-40f1-838d-379d5cee5319-kube-api-access-v9rt9\") pod \"barbican-db-create-nwrsm\" (UID: \"5c290d7c-34a6-40f1-838d-379d5cee5319\") " pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.356144 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-operator-scripts\") pod \"cinder-26ee-account-create-update-mnkx4\" (UID: \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\") " pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.356169 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfjt4\" (UniqueName: \"kubernetes.io/projected/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-kube-api-access-vfjt4\") pod \"cinder-26ee-account-create-update-mnkx4\" 
(UID: \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\") " pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.357323 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a613db58-0a2a-4960-a361-b02a32ed6713-operator-scripts\") pod \"barbican-914c-account-create-update-7m424\" (UID: \"a613db58-0a2a-4960-a361-b02a32ed6713\") " pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.358421 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c290d7c-34a6-40f1-838d-379d5cee5319-operator-scripts\") pod \"barbican-db-create-nwrsm\" (UID: \"5c290d7c-34a6-40f1-838d-379d5cee5319\") " pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.369796 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-qb5xt"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.371823 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.382686 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qb5xt"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.408122 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssbqd\" (UniqueName: \"kubernetes.io/projected/a613db58-0a2a-4960-a361-b02a32ed6713-kube-api-access-ssbqd\") pod \"barbican-914c-account-create-update-7m424\" (UID: \"a613db58-0a2a-4960-a361-b02a32ed6713\") " pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.420096 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9rt9\" (UniqueName: \"kubernetes.io/projected/5c290d7c-34a6-40f1-838d-379d5cee5319-kube-api-access-v9rt9\") pod \"barbican-db-create-nwrsm\" (UID: \"5c290d7c-34a6-40f1-838d-379d5cee5319\") " pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.442842 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-9jkh2"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.444531 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.449741 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s452q" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.450005 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.450158 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.450354 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.457467 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-operator-scripts\") pod \"cinder-26ee-account-create-update-mnkx4\" (UID: \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\") " pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.457514 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfjt4\" (UniqueName: \"kubernetes.io/projected/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-kube-api-access-vfjt4\") pod \"cinder-26ee-account-create-update-mnkx4\" (UID: \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\") " pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.457558 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-operator-scripts\") pod \"neutron-db-create-qb5xt\" (UID: \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\") " pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.458157 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vvm6\" (UniqueName: \"kubernetes.io/projected/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-kube-api-access-8vvm6\") pod \"neutron-db-create-qb5xt\" (UID: \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\") " pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.458947 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-operator-scripts\") pod \"cinder-26ee-account-create-update-mnkx4\" (UID: \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\") " pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.472788 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-9jkh2"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.473197 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.486499 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-ff76-account-create-update-mnw77"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.488063 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfjt4\" (UniqueName: \"kubernetes.io/projected/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-kube-api-access-vfjt4\") pod \"cinder-26ee-account-create-update-mnkx4\" (UID: \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\") " pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.488130 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.490807 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.494065 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.510818 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ff76-account-create-update-mnw77"] Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.560379 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17393775-7e86-44ba-8fcc-d502ed251de0-operator-scripts\") pod \"neutron-ff76-account-create-update-mnw77\" (UID: \"17393775-7e86-44ba-8fcc-d502ed251de0\") " pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.560471 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqmpq\" (UniqueName: \"kubernetes.io/projected/d02ac887-7c2b-4eea-bf6f-795359aa8b14-kube-api-access-kqmpq\") pod \"keystone-db-sync-9jkh2\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.560511 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z869d\" (UniqueName: \"kubernetes.io/projected/17393775-7e86-44ba-8fcc-d502ed251de0-kube-api-access-z869d\") pod \"neutron-ff76-account-create-update-mnw77\" (UID: \"17393775-7e86-44ba-8fcc-d502ed251de0\") " pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.560604 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-combined-ca-bundle\") pod \"keystone-db-sync-9jkh2\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.560657 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-operator-scripts\") pod \"neutron-db-create-qb5xt\" (UID: \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\") " pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.560719 5014 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-config-data\") pod \"keystone-db-sync-9jkh2\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.560745 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vvm6\" (UniqueName: \"kubernetes.io/projected/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-kube-api-access-8vvm6\") pod \"neutron-db-create-qb5xt\" (UID: \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\") " pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.574183 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-operator-scripts\") pod \"neutron-db-create-qb5xt\" (UID: \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\") " pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.656731 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.662979 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqmpq\" (UniqueName: \"kubernetes.io/projected/d02ac887-7c2b-4eea-bf6f-795359aa8b14-kube-api-access-kqmpq\") pod \"keystone-db-sync-9jkh2\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.663027 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z869d\" (UniqueName: \"kubernetes.io/projected/17393775-7e86-44ba-8fcc-d502ed251de0-kube-api-access-z869d\") pod \"neutron-ff76-account-create-update-mnw77\" (UID: \"17393775-7e86-44ba-8fcc-d502ed251de0\") " pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.663089 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-combined-ca-bundle\") pod \"keystone-db-sync-9jkh2\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.663144 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-config-data\") pod \"keystone-db-sync-9jkh2\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.663191 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17393775-7e86-44ba-8fcc-d502ed251de0-operator-scripts\") pod \"neutron-ff76-account-create-update-mnw77\" (UID: \"17393775-7e86-44ba-8fcc-d502ed251de0\") " pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.664089 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17393775-7e86-44ba-8fcc-d502ed251de0-operator-scripts\") pod 
\"neutron-ff76-account-create-update-mnw77\" (UID: \"17393775-7e86-44ba-8fcc-d502ed251de0\") " pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.692390 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vvm6\" (UniqueName: \"kubernetes.io/projected/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-kube-api-access-8vvm6\") pod \"neutron-db-create-qb5xt\" (UID: \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\") " pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.698163 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-combined-ca-bundle\") pod \"keystone-db-sync-9jkh2\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.698454 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-config-data\") pod \"keystone-db-sync-9jkh2\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.701937 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z869d\" (UniqueName: \"kubernetes.io/projected/17393775-7e86-44ba-8fcc-d502ed251de0-kube-api-access-z869d\") pod \"neutron-ff76-account-create-update-mnw77\" (UID: \"17393775-7e86-44ba-8fcc-d502ed251de0\") " pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.702850 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" event={"ID":"8eb8cd04-c5a2-4a43-8648-80e74478ec75","Type":"ContainerStarted","Data":"ca45bc629a54bb9822178789a6eaa7dbd592e9a1e24ba9bc6b7292973ca89575"} Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.708044 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqmpq\" (UniqueName: \"kubernetes.io/projected/d02ac887-7c2b-4eea-bf6f-795359aa8b14-kube-api-access-kqmpq\") pod \"keystone-db-sync-9jkh2\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.708150 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.713409 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.731891 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.733321 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" podStartSLOduration=10.733303506 podStartE2EDuration="10.733303506s" podCreationTimestamp="2025-12-05 11:07:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:42.732768723 +0000 UTC m=+1189.680886437" watchObservedRunningTime="2025-12-05 11:07:42.733303506 +0000 UTC m=+1189.681421210" Dec 05 11:07:42 crc kubenswrapper[5014]: I1205 11:07:42.756698 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.112593 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-dz8p5"] Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.472244 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-914c-account-create-update-7m424"] Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.549704 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nwrsm"] Dec 05 11:07:43 crc kubenswrapper[5014]: W1205 11:07:43.557546 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c290d7c_34a6_40f1_838d_379d5cee5319.slice/crio-7d2cb893a42bb3c6f950fa6319ba7c5ecaee923a2bb7d90ec889a41e532b14e2 WatchSource:0}: Error finding container 7d2cb893a42bb3c6f950fa6319ba7c5ecaee923a2bb7d90ec889a41e532b14e2: Status 404 returned error can't find the container with id 7d2cb893a42bb3c6f950fa6319ba7c5ecaee923a2bb7d90ec889a41e532b14e2 Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.568683 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-26ee-account-create-update-mnkx4"] Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.578039 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qb5xt"] Dec 05 11:07:43 crc kubenswrapper[5014]: W1205 11:07:43.580732 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64e940f0_5a75_41aa_86e5_8da00fd4fe1b.slice/crio-f1c926082b7e362950ea804bb0c68b7f276a11cfc10bf0b5b30b76992560dda0 WatchSource:0}: Error finding container f1c926082b7e362950ea804bb0c68b7f276a11cfc10bf0b5b30b76992560dda0: Status 404 returned error can't find the container with id f1c926082b7e362950ea804bb0c68b7f276a11cfc10bf0b5b30b76992560dda0 Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.682406 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-9jkh2"] Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.692731 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ff76-account-create-update-mnw77"] Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.713710 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nwrsm" event={"ID":"5c290d7c-34a6-40f1-838d-379d5cee5319","Type":"ContainerStarted","Data":"7d2cb893a42bb3c6f950fa6319ba7c5ecaee923a2bb7d90ec889a41e532b14e2"} Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.715862 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-26ee-account-create-update-mnkx4" event={"ID":"71c84bd0-ef34-4a59-9542-4abfbf3ffb40","Type":"ContainerStarted","Data":"e6a088ed0dd1453860e164b35a10eb109744c08311cbb9ee7b5ae8d75a90db39"} Dec 05 11:07:43 crc kubenswrapper[5014]: W1205 11:07:43.715999 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17393775_7e86_44ba_8fcc_d502ed251de0.slice/crio-c16f97396c19acebe7c9741703eabe1a3cc01d09ddcafb22e063b8f1ef33acb6 WatchSource:0}: Error finding container c16f97396c19acebe7c9741703eabe1a3cc01d09ddcafb22e063b8f1ef33acb6: Status 404 returned error can't find the container with id c16f97396c19acebe7c9741703eabe1a3cc01d09ddcafb22e063b8f1ef33acb6 Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.720614 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-914c-account-create-update-7m424" event={"ID":"a613db58-0a2a-4960-a361-b02a32ed6713","Type":"ContainerStarted","Data":"57ae6b616ab62d6a5d4c3bc72f7829b97b2f7bbd05f6de3092f926a4af100499"} Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.724730 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qb5xt" event={"ID":"64e940f0-5a75-41aa-86e5-8da00fd4fe1b","Type":"ContainerStarted","Data":"f1c926082b7e362950ea804bb0c68b7f276a11cfc10bf0b5b30b76992560dda0"} Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.731520 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-9jkh2" event={"ID":"d02ac887-7c2b-4eea-bf6f-795359aa8b14","Type":"ContainerStarted","Data":"5b99837afaf7babf3f048cd34b66a08d57094d81556387972bee0ec63f937381"} Dec 05 11:07:43 crc kubenswrapper[5014]: I1205 11:07:43.735052 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dz8p5" event={"ID":"5b3c2731-7987-4449-bc49-71d8f679b8b6","Type":"ContainerStarted","Data":"fa3cfc5cc772cbe734f95b2f14923ba9c3820466b48fdc846217dcc7565ba11b"} Dec 05 11:07:44 crc kubenswrapper[5014]: I1205 11:07:44.746884 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff76-account-create-update-mnw77" event={"ID":"17393775-7e86-44ba-8fcc-d502ed251de0","Type":"ContainerStarted","Data":"c16f97396c19acebe7c9741703eabe1a3cc01d09ddcafb22e063b8f1ef33acb6"} Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.756578 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dz8p5" event={"ID":"5b3c2731-7987-4449-bc49-71d8f679b8b6","Type":"ContainerStarted","Data":"9a66f6c710634dd17a659e4bc3df6445db3864596381cd9c2ca97e620b9a0c1b"} Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.763974 5014 generic.go:334] "Generic (PLEG): container finished" podID="5c290d7c-34a6-40f1-838d-379d5cee5319" containerID="abe50be9dd8d0a00f62d8acce1aa9168554c2e21ef19aad43a6c354e448eca4b" exitCode=0 Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.764161 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nwrsm" event={"ID":"5c290d7c-34a6-40f1-838d-379d5cee5319","Type":"ContainerDied","Data":"abe50be9dd8d0a00f62d8acce1aa9168554c2e21ef19aad43a6c354e448eca4b"} Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.768184 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-26ee-account-create-update-mnkx4" event={"ID":"71c84bd0-ef34-4a59-9542-4abfbf3ffb40","Type":"ContainerStarted","Data":"5a0cf3d49f206f3a3137a5f6e5974e246d7030caa84f0ff30e86c711780d1a2b"} Dec 05 11:07:45 crc 
kubenswrapper[5014]: I1205 11:07:45.771541 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-914c-account-create-update-7m424" event={"ID":"a613db58-0a2a-4960-a361-b02a32ed6713","Type":"ContainerStarted","Data":"4737acc92b37695fddb60d25976e76f21f0e21349485234480ba45da5a95e862"} Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.776310 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff76-account-create-update-mnw77" event={"ID":"17393775-7e86-44ba-8fcc-d502ed251de0","Type":"ContainerStarted","Data":"345a6703ed9d14e9283ac4bae1e8267ef6e05ea14c6d6dde9a55d87ddff8f875"} Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.779416 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-dz8p5" podStartSLOduration=4.779399243 podStartE2EDuration="4.779399243s" podCreationTimestamp="2025-12-05 11:07:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:45.775764094 +0000 UTC m=+1192.723881798" watchObservedRunningTime="2025-12-05 11:07:45.779399243 +0000 UTC m=+1192.727516937" Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.779483 5014 generic.go:334] "Generic (PLEG): container finished" podID="64e940f0-5a75-41aa-86e5-8da00fd4fe1b" containerID="0543cdb8eda5b3fb598c4009f0a990a403780ebe9097d42fad28008089ceedfa" exitCode=0 Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.779523 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qb5xt" event={"ID":"64e940f0-5a75-41aa-86e5-8da00fd4fe1b","Type":"ContainerDied","Data":"0543cdb8eda5b3fb598c4009f0a990a403780ebe9097d42fad28008089ceedfa"} Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.810632 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-914c-account-create-update-7m424" podStartSLOduration=3.810612909 podStartE2EDuration="3.810612909s" podCreationTimestamp="2025-12-05 11:07:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:45.808053127 +0000 UTC m=+1192.756170831" watchObservedRunningTime="2025-12-05 11:07:45.810612909 +0000 UTC m=+1192.758730613" Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.823383 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-26ee-account-create-update-mnkx4" podStartSLOduration=3.8233602380000002 podStartE2EDuration="3.823360238s" podCreationTimestamp="2025-12-05 11:07:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:45.820366856 +0000 UTC m=+1192.768484570" watchObservedRunningTime="2025-12-05 11:07:45.823360238 +0000 UTC m=+1192.771477952" Dec 05 11:07:45 crc kubenswrapper[5014]: I1205 11:07:45.858808 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-ff76-account-create-update-mnw77" podStartSLOduration=3.858791357 podStartE2EDuration="3.858791357s" podCreationTimestamp="2025-12-05 11:07:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:07:45.850480525 +0000 UTC m=+1192.798598249" watchObservedRunningTime="2025-12-05 11:07:45.858791357 +0000 UTC m=+1192.806909061" Dec 05 11:07:46 crc 
kubenswrapper[5014]: I1205 11:07:46.792232 5014 generic.go:334] "Generic (PLEG): container finished" podID="71c84bd0-ef34-4a59-9542-4abfbf3ffb40" containerID="5a0cf3d49f206f3a3137a5f6e5974e246d7030caa84f0ff30e86c711780d1a2b" exitCode=0 Dec 05 11:07:46 crc kubenswrapper[5014]: I1205 11:07:46.792327 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-26ee-account-create-update-mnkx4" event={"ID":"71c84bd0-ef34-4a59-9542-4abfbf3ffb40","Type":"ContainerDied","Data":"5a0cf3d49f206f3a3137a5f6e5974e246d7030caa84f0ff30e86c711780d1a2b"} Dec 05 11:07:46 crc kubenswrapper[5014]: I1205 11:07:46.794605 5014 generic.go:334] "Generic (PLEG): container finished" podID="a613db58-0a2a-4960-a361-b02a32ed6713" containerID="4737acc92b37695fddb60d25976e76f21f0e21349485234480ba45da5a95e862" exitCode=0 Dec 05 11:07:46 crc kubenswrapper[5014]: I1205 11:07:46.794669 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-914c-account-create-update-7m424" event={"ID":"a613db58-0a2a-4960-a361-b02a32ed6713","Type":"ContainerDied","Data":"4737acc92b37695fddb60d25976e76f21f0e21349485234480ba45da5a95e862"} Dec 05 11:07:46 crc kubenswrapper[5014]: I1205 11:07:46.798351 5014 generic.go:334] "Generic (PLEG): container finished" podID="17393775-7e86-44ba-8fcc-d502ed251de0" containerID="345a6703ed9d14e9283ac4bae1e8267ef6e05ea14c6d6dde9a55d87ddff8f875" exitCode=0 Dec 05 11:07:46 crc kubenswrapper[5014]: I1205 11:07:46.798437 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff76-account-create-update-mnw77" event={"ID":"17393775-7e86-44ba-8fcc-d502ed251de0","Type":"ContainerDied","Data":"345a6703ed9d14e9283ac4bae1e8267ef6e05ea14c6d6dde9a55d87ddff8f875"} Dec 05 11:07:46 crc kubenswrapper[5014]: I1205 11:07:46.799993 5014 generic.go:334] "Generic (PLEG): container finished" podID="5b3c2731-7987-4449-bc49-71d8f679b8b6" containerID="9a66f6c710634dd17a659e4bc3df6445db3864596381cd9c2ca97e620b9a0c1b" exitCode=0 Dec 05 11:07:46 crc kubenswrapper[5014]: I1205 11:07:46.800036 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dz8p5" event={"ID":"5b3c2731-7987-4449-bc49-71d8f679b8b6","Type":"ContainerDied","Data":"9a66f6c710634dd17a659e4bc3df6445db3864596381cd9c2ca97e620b9a0c1b"} Dec 05 11:07:48 crc kubenswrapper[5014]: I1205 11:07:48.203846 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:07:48 crc kubenswrapper[5014]: I1205 11:07:48.268362 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6jd9j"] Dec 05 11:07:48 crc kubenswrapper[5014]: I1205 11:07:48.268719 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" podUID="3f1bd062-017b-44e7-bad9-09ad9138ebcf" containerName="dnsmasq-dns" containerID="cri-o://af564453c3a6f10be6edad2ef9073512a99e8d4116011d12211dd42b5e5951ff" gracePeriod=10 Dec 05 11:07:48 crc kubenswrapper[5014]: I1205 11:07:48.819402 5014 generic.go:334] "Generic (PLEG): container finished" podID="3f1bd062-017b-44e7-bad9-09ad9138ebcf" containerID="af564453c3a6f10be6edad2ef9073512a99e8d4116011d12211dd42b5e5951ff" exitCode=0 Dec 05 11:07:48 crc kubenswrapper[5014]: I1205 11:07:48.819701 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" event={"ID":"3f1bd062-017b-44e7-bad9-09ad9138ebcf","Type":"ContainerDied","Data":"af564453c3a6f10be6edad2ef9073512a99e8d4116011d12211dd42b5e5951ff"} 
Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.209783 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.272432 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.279086 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.284842 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.311380 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.314198 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.324779 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.383093 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a613db58-0a2a-4960-a361-b02a32ed6713-operator-scripts\") pod \"a613db58-0a2a-4960-a361-b02a32ed6713\" (UID: \"a613db58-0a2a-4960-a361-b02a32ed6713\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.383150 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17393775-7e86-44ba-8fcc-d502ed251de0-operator-scripts\") pod \"17393775-7e86-44ba-8fcc-d502ed251de0\" (UID: \"17393775-7e86-44ba-8fcc-d502ed251de0\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.383176 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b3c2731-7987-4449-bc49-71d8f679b8b6-operator-scripts\") pod \"5b3c2731-7987-4449-bc49-71d8f679b8b6\" (UID: \"5b3c2731-7987-4449-bc49-71d8f679b8b6\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.383317 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-operator-scripts\") pod \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\" (UID: \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.383415 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfjt4\" (UniqueName: \"kubernetes.io/projected/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-kube-api-access-vfjt4\") pod \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\" (UID: \"71c84bd0-ef34-4a59-9542-4abfbf3ffb40\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.383470 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwwsv\" (UniqueName: \"kubernetes.io/projected/5b3c2731-7987-4449-bc49-71d8f679b8b6-kube-api-access-jwwsv\") pod \"5b3c2731-7987-4449-bc49-71d8f679b8b6\" (UID: \"5b3c2731-7987-4449-bc49-71d8f679b8b6\") " Dec 05 11:07:52 crc 
kubenswrapper[5014]: I1205 11:07:52.383495 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssbqd\" (UniqueName: \"kubernetes.io/projected/a613db58-0a2a-4960-a361-b02a32ed6713-kube-api-access-ssbqd\") pod \"a613db58-0a2a-4960-a361-b02a32ed6713\" (UID: \"a613db58-0a2a-4960-a361-b02a32ed6713\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.383548 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z869d\" (UniqueName: \"kubernetes.io/projected/17393775-7e86-44ba-8fcc-d502ed251de0-kube-api-access-z869d\") pod \"17393775-7e86-44ba-8fcc-d502ed251de0\" (UID: \"17393775-7e86-44ba-8fcc-d502ed251de0\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.384648 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "71c84bd0-ef34-4a59-9542-4abfbf3ffb40" (UID: "71c84bd0-ef34-4a59-9542-4abfbf3ffb40"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.384961 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a613db58-0a2a-4960-a361-b02a32ed6713-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a613db58-0a2a-4960-a361-b02a32ed6713" (UID: "a613db58-0a2a-4960-a361-b02a32ed6713"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.385022 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b3c2731-7987-4449-bc49-71d8f679b8b6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5b3c2731-7987-4449-bc49-71d8f679b8b6" (UID: "5b3c2731-7987-4449-bc49-71d8f679b8b6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.385019 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17393775-7e86-44ba-8fcc-d502ed251de0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "17393775-7e86-44ba-8fcc-d502ed251de0" (UID: "17393775-7e86-44ba-8fcc-d502ed251de0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.390366 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-kube-api-access-vfjt4" (OuterVolumeSpecName: "kube-api-access-vfjt4") pod "71c84bd0-ef34-4a59-9542-4abfbf3ffb40" (UID: "71c84bd0-ef34-4a59-9542-4abfbf3ffb40"). InnerVolumeSpecName "kube-api-access-vfjt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.392375 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a613db58-0a2a-4960-a361-b02a32ed6713-kube-api-access-ssbqd" (OuterVolumeSpecName: "kube-api-access-ssbqd") pod "a613db58-0a2a-4960-a361-b02a32ed6713" (UID: "a613db58-0a2a-4960-a361-b02a32ed6713"). InnerVolumeSpecName "kube-api-access-ssbqd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.392443 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b3c2731-7987-4449-bc49-71d8f679b8b6-kube-api-access-jwwsv" (OuterVolumeSpecName: "kube-api-access-jwwsv") pod "5b3c2731-7987-4449-bc49-71d8f679b8b6" (UID: "5b3c2731-7987-4449-bc49-71d8f679b8b6"). InnerVolumeSpecName "kube-api-access-jwwsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.392456 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17393775-7e86-44ba-8fcc-d502ed251de0-kube-api-access-z869d" (OuterVolumeSpecName: "kube-api-access-z869d") pod "17393775-7e86-44ba-8fcc-d502ed251de0" (UID: "17393775-7e86-44ba-8fcc-d502ed251de0"). InnerVolumeSpecName "kube-api-access-z869d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.485098 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-nb\") pod \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.485208 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9rt9\" (UniqueName: \"kubernetes.io/projected/5c290d7c-34a6-40f1-838d-379d5cee5319-kube-api-access-v9rt9\") pod \"5c290d7c-34a6-40f1-838d-379d5cee5319\" (UID: \"5c290d7c-34a6-40f1-838d-379d5cee5319\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.485258 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-dns-svc\") pod \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.485347 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-config\") pod \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.485397 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-sb\") pod \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.485435 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpjrf\" (UniqueName: \"kubernetes.io/projected/3f1bd062-017b-44e7-bad9-09ad9138ebcf-kube-api-access-bpjrf\") pod \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\" (UID: \"3f1bd062-017b-44e7-bad9-09ad9138ebcf\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.485476 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vvm6\" (UniqueName: \"kubernetes.io/projected/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-kube-api-access-8vvm6\") pod \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\" (UID: \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.485514 5014 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-operator-scripts\") pod \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\" (UID: \"64e940f0-5a75-41aa-86e5-8da00fd4fe1b\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.485533 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c290d7c-34a6-40f1-838d-379d5cee5319-operator-scripts\") pod \"5c290d7c-34a6-40f1-838d-379d5cee5319\" (UID: \"5c290d7c-34a6-40f1-838d-379d5cee5319\") " Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.486016 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a613db58-0a2a-4960-a361-b02a32ed6713-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.486046 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17393775-7e86-44ba-8fcc-d502ed251de0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.486059 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b3c2731-7987-4449-bc49-71d8f679b8b6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.486071 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.486083 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfjt4\" (UniqueName: \"kubernetes.io/projected/71c84bd0-ef34-4a59-9542-4abfbf3ffb40-kube-api-access-vfjt4\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.486096 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwwsv\" (UniqueName: \"kubernetes.io/projected/5b3c2731-7987-4449-bc49-71d8f679b8b6-kube-api-access-jwwsv\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.486109 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssbqd\" (UniqueName: \"kubernetes.io/projected/a613db58-0a2a-4960-a361-b02a32ed6713-kube-api-access-ssbqd\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.486124 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z869d\" (UniqueName: \"kubernetes.io/projected/17393775-7e86-44ba-8fcc-d502ed251de0-kube-api-access-z869d\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.489460 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "64e940f0-5a75-41aa-86e5-8da00fd4fe1b" (UID: "64e940f0-5a75-41aa-86e5-8da00fd4fe1b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.489910 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c290d7c-34a6-40f1-838d-379d5cee5319-kube-api-access-v9rt9" (OuterVolumeSpecName: "kube-api-access-v9rt9") pod "5c290d7c-34a6-40f1-838d-379d5cee5319" (UID: "5c290d7c-34a6-40f1-838d-379d5cee5319"). InnerVolumeSpecName "kube-api-access-v9rt9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.490488 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-kube-api-access-8vvm6" (OuterVolumeSpecName: "kube-api-access-8vvm6") pod "64e940f0-5a75-41aa-86e5-8da00fd4fe1b" (UID: "64e940f0-5a75-41aa-86e5-8da00fd4fe1b"). InnerVolumeSpecName "kube-api-access-8vvm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.490729 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f1bd062-017b-44e7-bad9-09ad9138ebcf-kube-api-access-bpjrf" (OuterVolumeSpecName: "kube-api-access-bpjrf") pod "3f1bd062-017b-44e7-bad9-09ad9138ebcf" (UID: "3f1bd062-017b-44e7-bad9-09ad9138ebcf"). InnerVolumeSpecName "kube-api-access-bpjrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.490744 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c290d7c-34a6-40f1-838d-379d5cee5319-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5c290d7c-34a6-40f1-838d-379d5cee5319" (UID: "5c290d7c-34a6-40f1-838d-379d5cee5319"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.526095 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-config" (OuterVolumeSpecName: "config") pod "3f1bd062-017b-44e7-bad9-09ad9138ebcf" (UID: "3f1bd062-017b-44e7-bad9-09ad9138ebcf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.527322 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3f1bd062-017b-44e7-bad9-09ad9138ebcf" (UID: "3f1bd062-017b-44e7-bad9-09ad9138ebcf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.528029 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3f1bd062-017b-44e7-bad9-09ad9138ebcf" (UID: "3f1bd062-017b-44e7-bad9-09ad9138ebcf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.535509 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3f1bd062-017b-44e7-bad9-09ad9138ebcf" (UID: "3f1bd062-017b-44e7-bad9-09ad9138ebcf"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.587624 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpjrf\" (UniqueName: \"kubernetes.io/projected/3f1bd062-017b-44e7-bad9-09ad9138ebcf-kube-api-access-bpjrf\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.587663 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vvm6\" (UniqueName: \"kubernetes.io/projected/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-kube-api-access-8vvm6\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.587673 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64e940f0-5a75-41aa-86e5-8da00fd4fe1b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.587685 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c290d7c-34a6-40f1-838d-379d5cee5319-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.587694 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.587703 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9rt9\" (UniqueName: \"kubernetes.io/projected/5c290d7c-34a6-40f1-838d-379d5cee5319-kube-api-access-v9rt9\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.587712 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.587722 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.587730 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f1bd062-017b-44e7-bad9-09ad9138ebcf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.862102 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-9jkh2" event={"ID":"d02ac887-7c2b-4eea-bf6f-795359aa8b14","Type":"ContainerStarted","Data":"f4b2ac3215592674bbfc6d78eb54803963fa13c601a4479b4ed488c17d8d805f"} Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.864928 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dz8p5" event={"ID":"5b3c2731-7987-4449-bc49-71d8f679b8b6","Type":"ContainerDied","Data":"fa3cfc5cc772cbe734f95b2f14923ba9c3820466b48fdc846217dcc7565ba11b"} Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.864961 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa3cfc5cc772cbe734f95b2f14923ba9c3820466b48fdc846217dcc7565ba11b" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.865013 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-dz8p5" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.870485 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nwrsm" event={"ID":"5c290d7c-34a6-40f1-838d-379d5cee5319","Type":"ContainerDied","Data":"7d2cb893a42bb3c6f950fa6319ba7c5ecaee923a2bb7d90ec889a41e532b14e2"} Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.870515 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d2cb893a42bb3c6f950fa6319ba7c5ecaee923a2bb7d90ec889a41e532b14e2" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.870546 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nwrsm" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.872228 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-26ee-account-create-update-mnkx4" event={"ID":"71c84bd0-ef34-4a59-9542-4abfbf3ffb40","Type":"ContainerDied","Data":"e6a088ed0dd1453860e164b35a10eb109744c08311cbb9ee7b5ae8d75a90db39"} Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.872259 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6a088ed0dd1453860e164b35a10eb109744c08311cbb9ee7b5ae8d75a90db39" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.872480 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-26ee-account-create-update-mnkx4" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.874014 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-914c-account-create-update-7m424" event={"ID":"a613db58-0a2a-4960-a361-b02a32ed6713","Type":"ContainerDied","Data":"57ae6b616ab62d6a5d4c3bc72f7829b97b2f7bbd05f6de3092f926a4af100499"} Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.874048 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57ae6b616ab62d6a5d4c3bc72f7829b97b2f7bbd05f6de3092f926a4af100499" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.874107 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-914c-account-create-update-7m424" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.875816 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff76-account-create-update-mnw77" event={"ID":"17393775-7e86-44ba-8fcc-d502ed251de0","Type":"ContainerDied","Data":"c16f97396c19acebe7c9741703eabe1a3cc01d09ddcafb22e063b8f1ef33acb6"} Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.875848 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c16f97396c19acebe7c9741703eabe1a3cc01d09ddcafb22e063b8f1ef33acb6" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.875831 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ff76-account-create-update-mnw77" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.877773 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-qb5xt" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.877804 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qb5xt" event={"ID":"64e940f0-5a75-41aa-86e5-8da00fd4fe1b","Type":"ContainerDied","Data":"f1c926082b7e362950ea804bb0c68b7f276a11cfc10bf0b5b30b76992560dda0"} Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.878022 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1c926082b7e362950ea804bb0c68b7f276a11cfc10bf0b5b30b76992560dda0" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.881038 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" event={"ID":"3f1bd062-017b-44e7-bad9-09ad9138ebcf","Type":"ContainerDied","Data":"77be6e9dfdb899a1814c1374d9e3abcbeec64af9f9ac9931aa93651af359690e"} Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.881101 5014 scope.go:117] "RemoveContainer" containerID="af564453c3a6f10be6edad2ef9073512a99e8d4116011d12211dd42b5e5951ff" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.881101 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6jd9j" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.890733 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-9jkh2" podStartSLOduration=2.569433145 podStartE2EDuration="10.890705845s" podCreationTimestamp="2025-12-05 11:07:42 +0000 UTC" firstStartedPulling="2025-12-05 11:07:43.705135939 +0000 UTC m=+1190.653253643" lastFinishedPulling="2025-12-05 11:07:52.026408639 +0000 UTC m=+1198.974526343" observedRunningTime="2025-12-05 11:07:52.886540604 +0000 UTC m=+1199.834658338" watchObservedRunningTime="2025-12-05 11:07:52.890705845 +0000 UTC m=+1199.838823549" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.913717 5014 scope.go:117] "RemoveContainer" containerID="3444ec4072cf748b91556c9b0c2b06774c10e5ac2205bfed338e1eaa95663ced" Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.946246 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6jd9j"] Dec 05 11:07:52 crc kubenswrapper[5014]: I1205 11:07:52.953938 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6jd9j"] Dec 05 11:07:53 crc kubenswrapper[5014]: I1205 11:07:53.337742 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f1bd062-017b-44e7-bad9-09ad9138ebcf" path="/var/lib/kubelet/pods/3f1bd062-017b-44e7-bad9-09ad9138ebcf/volumes" Dec 05 11:07:53 crc kubenswrapper[5014]: I1205 11:07:53.893902 5014 generic.go:334] "Generic (PLEG): container finished" podID="9a59ccde-127a-4709-8ea1-efd59b48504f" containerID="277f825b0886af74e153dd6df8fe783d0e0b39885a7de3f0842d8caa6db93f9a" exitCode=0 Dec 05 11:07:53 crc kubenswrapper[5014]: I1205 11:07:53.893994 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8hlb6" event={"ID":"9a59ccde-127a-4709-8ea1-efd59b48504f","Type":"ContainerDied","Data":"277f825b0886af74e153dd6df8fe783d0e0b39885a7de3f0842d8caa6db93f9a"} Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.268393 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.352602 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-db-sync-config-data\") pod \"9a59ccde-127a-4709-8ea1-efd59b48504f\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.352809 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-combined-ca-bundle\") pod \"9a59ccde-127a-4709-8ea1-efd59b48504f\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.352903 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hgwj\" (UniqueName: \"kubernetes.io/projected/9a59ccde-127a-4709-8ea1-efd59b48504f-kube-api-access-4hgwj\") pod \"9a59ccde-127a-4709-8ea1-efd59b48504f\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.352949 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-config-data\") pod \"9a59ccde-127a-4709-8ea1-efd59b48504f\" (UID: \"9a59ccde-127a-4709-8ea1-efd59b48504f\") " Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.359263 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a59ccde-127a-4709-8ea1-efd59b48504f-kube-api-access-4hgwj" (OuterVolumeSpecName: "kube-api-access-4hgwj") pod "9a59ccde-127a-4709-8ea1-efd59b48504f" (UID: "9a59ccde-127a-4709-8ea1-efd59b48504f"). InnerVolumeSpecName "kube-api-access-4hgwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.372594 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "9a59ccde-127a-4709-8ea1-efd59b48504f" (UID: "9a59ccde-127a-4709-8ea1-efd59b48504f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.380530 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a59ccde-127a-4709-8ea1-efd59b48504f" (UID: "9a59ccde-127a-4709-8ea1-efd59b48504f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.405605 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-config-data" (OuterVolumeSpecName: "config-data") pod "9a59ccde-127a-4709-8ea1-efd59b48504f" (UID: "9a59ccde-127a-4709-8ea1-efd59b48504f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.454632 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hgwj\" (UniqueName: \"kubernetes.io/projected/9a59ccde-127a-4709-8ea1-efd59b48504f-kube-api-access-4hgwj\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.454681 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.454693 5014 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.454702 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a59ccde-127a-4709-8ea1-efd59b48504f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.911190 5014 generic.go:334] "Generic (PLEG): container finished" podID="d02ac887-7c2b-4eea-bf6f-795359aa8b14" containerID="f4b2ac3215592674bbfc6d78eb54803963fa13c601a4479b4ed488c17d8d805f" exitCode=0 Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.911512 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-9jkh2" event={"ID":"d02ac887-7c2b-4eea-bf6f-795359aa8b14","Type":"ContainerDied","Data":"f4b2ac3215592674bbfc6d78eb54803963fa13c601a4479b4ed488c17d8d805f"} Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.914113 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8hlb6" event={"ID":"9a59ccde-127a-4709-8ea1-efd59b48504f","Type":"ContainerDied","Data":"e85e912ef8c743a1e0f89774c5b014d89b1bc76e00bb8d18836d2dfa8e91ecb6"} Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.914133 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e85e912ef8c743a1e0f89774c5b014d89b1bc76e00bb8d18836d2dfa8e91ecb6" Dec 05 11:07:55 crc kubenswrapper[5014]: I1205 11:07:55.914193 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-8hlb6" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.313782 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-k94w2"] Dec 05 11:07:56 crc kubenswrapper[5014]: E1205 11:07:56.314163 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f1bd062-017b-44e7-bad9-09ad9138ebcf" containerName="init" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314176 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f1bd062-017b-44e7-bad9-09ad9138ebcf" containerName="init" Dec 05 11:07:56 crc kubenswrapper[5014]: E1205 11:07:56.314185 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17393775-7e86-44ba-8fcc-d502ed251de0" containerName="mariadb-account-create-update" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314191 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="17393775-7e86-44ba-8fcc-d502ed251de0" containerName="mariadb-account-create-update" Dec 05 11:07:56 crc kubenswrapper[5014]: E1205 11:07:56.314202 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a59ccde-127a-4709-8ea1-efd59b48504f" containerName="glance-db-sync" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314208 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a59ccde-127a-4709-8ea1-efd59b48504f" containerName="glance-db-sync" Dec 05 11:07:56 crc kubenswrapper[5014]: E1205 11:07:56.314219 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c290d7c-34a6-40f1-838d-379d5cee5319" containerName="mariadb-database-create" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314227 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c290d7c-34a6-40f1-838d-379d5cee5319" containerName="mariadb-database-create" Dec 05 11:07:56 crc kubenswrapper[5014]: E1205 11:07:56.314240 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71c84bd0-ef34-4a59-9542-4abfbf3ffb40" containerName="mariadb-account-create-update" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314246 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="71c84bd0-ef34-4a59-9542-4abfbf3ffb40" containerName="mariadb-account-create-update" Dec 05 11:07:56 crc kubenswrapper[5014]: E1205 11:07:56.314258 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b3c2731-7987-4449-bc49-71d8f679b8b6" containerName="mariadb-database-create" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314264 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b3c2731-7987-4449-bc49-71d8f679b8b6" containerName="mariadb-database-create" Dec 05 11:07:56 crc kubenswrapper[5014]: E1205 11:07:56.314288 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a613db58-0a2a-4960-a361-b02a32ed6713" containerName="mariadb-account-create-update" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314294 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="a613db58-0a2a-4960-a361-b02a32ed6713" containerName="mariadb-account-create-update" Dec 05 11:07:56 crc kubenswrapper[5014]: E1205 11:07:56.314307 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f1bd062-017b-44e7-bad9-09ad9138ebcf" containerName="dnsmasq-dns" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314312 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f1bd062-017b-44e7-bad9-09ad9138ebcf" containerName="dnsmasq-dns" Dec 05 11:07:56 crc kubenswrapper[5014]: E1205 11:07:56.314325 5014 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e940f0-5a75-41aa-86e5-8da00fd4fe1b" containerName="mariadb-database-create" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314332 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e940f0-5a75-41aa-86e5-8da00fd4fe1b" containerName="mariadb-database-create" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314543 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f1bd062-017b-44e7-bad9-09ad9138ebcf" containerName="dnsmasq-dns" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314563 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a59ccde-127a-4709-8ea1-efd59b48504f" containerName="glance-db-sync" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314592 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c290d7c-34a6-40f1-838d-379d5cee5319" containerName="mariadb-database-create" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314600 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="17393775-7e86-44ba-8fcc-d502ed251de0" containerName="mariadb-account-create-update" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314612 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="a613db58-0a2a-4960-a361-b02a32ed6713" containerName="mariadb-account-create-update" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314622 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e940f0-5a75-41aa-86e5-8da00fd4fe1b" containerName="mariadb-database-create" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314634 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b3c2731-7987-4449-bc49-71d8f679b8b6" containerName="mariadb-database-create" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.314643 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="71c84bd0-ef34-4a59-9542-4abfbf3ffb40" containerName="mariadb-account-create-update" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.316719 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.351190 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-k94w2"] Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.368773 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-config\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.368814 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.368841 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-svc\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.368869 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.368918 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfk2w\" (UniqueName: \"kubernetes.io/projected/d3b10718-6fb0-4f19-a960-e96ef12eb543-kube-api-access-vfk2w\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.368938 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.470015 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-config\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.470074 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.470111 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-svc\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.470141 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.470214 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfk2w\" (UniqueName: \"kubernetes.io/projected/d3b10718-6fb0-4f19-a960-e96ef12eb543-kube-api-access-vfk2w\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.470242 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.471109 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-config\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.471193 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.471227 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.471405 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-svc\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.471679 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.493259 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfk2w\" (UniqueName: \"kubernetes.io/projected/d3b10718-6fb0-4f19-a960-e96ef12eb543-kube-api-access-vfk2w\") pod 
\"dnsmasq-dns-895cf5cf-k94w2\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:56 crc kubenswrapper[5014]: I1205 11:07:56.638490 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.105483 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-k94w2"] Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.216262 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.282830 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqmpq\" (UniqueName: \"kubernetes.io/projected/d02ac887-7c2b-4eea-bf6f-795359aa8b14-kube-api-access-kqmpq\") pod \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.283103 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-config-data\") pod \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.283130 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-combined-ca-bundle\") pod \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\" (UID: \"d02ac887-7c2b-4eea-bf6f-795359aa8b14\") " Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.286442 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d02ac887-7c2b-4eea-bf6f-795359aa8b14-kube-api-access-kqmpq" (OuterVolumeSpecName: "kube-api-access-kqmpq") pod "d02ac887-7c2b-4eea-bf6f-795359aa8b14" (UID: "d02ac887-7c2b-4eea-bf6f-795359aa8b14"). InnerVolumeSpecName "kube-api-access-kqmpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.311897 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d02ac887-7c2b-4eea-bf6f-795359aa8b14" (UID: "d02ac887-7c2b-4eea-bf6f-795359aa8b14"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.328452 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-config-data" (OuterVolumeSpecName: "config-data") pod "d02ac887-7c2b-4eea-bf6f-795359aa8b14" (UID: "d02ac887-7c2b-4eea-bf6f-795359aa8b14"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.385587 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.385650 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02ac887-7c2b-4eea-bf6f-795359aa8b14-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.385667 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqmpq\" (UniqueName: \"kubernetes.io/projected/d02ac887-7c2b-4eea-bf6f-795359aa8b14-kube-api-access-kqmpq\") on node \"crc\" DevicePath \"\"" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.931921 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-9jkh2" event={"ID":"d02ac887-7c2b-4eea-bf6f-795359aa8b14","Type":"ContainerDied","Data":"5b99837afaf7babf3f048cd34b66a08d57094d81556387972bee0ec63f937381"} Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.932240 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b99837afaf7babf3f048cd34b66a08d57094d81556387972bee0ec63f937381" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.931951 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-9jkh2" Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.934301 5014 generic.go:334] "Generic (PLEG): container finished" podID="d3b10718-6fb0-4f19-a960-e96ef12eb543" containerID="4d915d01c3d361c0fc38fa4877c4ed205eed8a3ffaf0d767049c6fb0f6ece0ec" exitCode=0 Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.934334 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" event={"ID":"d3b10718-6fb0-4f19-a960-e96ef12eb543","Type":"ContainerDied","Data":"4d915d01c3d361c0fc38fa4877c4ed205eed8a3ffaf0d767049c6fb0f6ece0ec"} Dec 05 11:07:57 crc kubenswrapper[5014]: I1205 11:07:57.934351 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" event={"ID":"d3b10718-6fb0-4f19-a960-e96ef12eb543","Type":"ContainerStarted","Data":"b0e3f2d8ae8e554ec1b14a70cf8983d8c2701074f9fac9b544dbf57a3a74bea4"} Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.185457 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-p6nwv"] Dec 05 11:07:58 crc kubenswrapper[5014]: E1205 11:07:58.185908 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d02ac887-7c2b-4eea-bf6f-795359aa8b14" containerName="keystone-db-sync" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.185928 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d02ac887-7c2b-4eea-bf6f-795359aa8b14" containerName="keystone-db-sync" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.186133 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d02ac887-7c2b-4eea-bf6f-795359aa8b14" containerName="keystone-db-sync" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.187780 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.191065 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.191294 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s452q" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.191359 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.191495 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.191643 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.197736 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-config-data\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.197843 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-combined-ca-bundle\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.197873 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqtjl\" (UniqueName: \"kubernetes.io/projected/e27b6216-5dd7-4d18-a951-1ecb3de69519-kube-api-access-tqtjl\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.198101 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-credential-keys\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.198120 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-scripts\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.198158 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-fernet-keys\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.221400 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-p6nwv"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.238974 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-895cf5cf-k94w2"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.299198 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-combined-ca-bundle\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.299526 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqtjl\" (UniqueName: \"kubernetes.io/projected/e27b6216-5dd7-4d18-a951-1ecb3de69519-kube-api-access-tqtjl\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.299646 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-credential-keys\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.299718 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-scripts\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.299809 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-fernet-keys\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.299882 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-config-data\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.393172 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-mbkgq"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.398430 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.403289 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-config-data\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.405085 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-combined-ca-bundle\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.406630 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-scripts\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.411102 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-credential-keys\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.418110 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-fernet-keys\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.426592 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqtjl\" (UniqueName: \"kubernetes.io/projected/e27b6216-5dd7-4d18-a951-1ecb3de69519-kube-api-access-tqtjl\") pod \"keystone-bootstrap-p6nwv\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.427873 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6969968ff7-gxm5f"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.430592 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.439745 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.440098 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.440361 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.440531 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-jrmdm" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.446555 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-mbkgq"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.507713 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.507756 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-config\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.507777 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.507899 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.507970 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.508000 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq9km\" (UniqueName: \"kubernetes.io/projected/fb3011a7-b170-49ed-8d4d-caf19aafaa57-kube-api-access-zq9km\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.507958 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6969968ff7-gxm5f"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.524688 
5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.613660 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdtmj\" (UniqueName: \"kubernetes.io/projected/a771dd48-18c0-4bbf-be8d-41e06f45789e-kube-api-access-pdtmj\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.613764 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a771dd48-18c0-4bbf-be8d-41e06f45789e-horizon-secret-key\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.613793 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a771dd48-18c0-4bbf-be8d-41e06f45789e-logs\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.613846 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.613876 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-config\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.613901 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.613994 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.614020 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-scripts\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.614052 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-config-data\") pod \"horizon-6969968ff7-gxm5f\" 
(UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.614089 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.614124 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq9km\" (UniqueName: \"kubernetes.io/projected/fb3011a7-b170-49ed-8d4d-caf19aafaa57-kube-api-access-zq9km\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.616687 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-config\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.630111 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.630250 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.630858 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.636408 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-fnq4z"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.637008 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.638014 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.646903 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-fnq4z"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.668682 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.669025 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-cjj6k" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.669151 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.718299 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdtmj\" (UniqueName: \"kubernetes.io/projected/a771dd48-18c0-4bbf-be8d-41e06f45789e-kube-api-access-pdtmj\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.718370 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a771dd48-18c0-4bbf-be8d-41e06f45789e-horizon-secret-key\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.718391 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a771dd48-18c0-4bbf-be8d-41e06f45789e-logs\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.718466 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-scripts\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.718490 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-config-data\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.720265 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-config-data\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.722183 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.727926 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a771dd48-18c0-4bbf-be8d-41e06f45789e-horizon-secret-key\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 
11:07:58.728515 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a771dd48-18c0-4bbf-be8d-41e06f45789e-logs\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.728649 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq9km\" (UniqueName: \"kubernetes.io/projected/fb3011a7-b170-49ed-8d4d-caf19aafaa57-kube-api-access-zq9km\") pod \"dnsmasq-dns-6c9c9f998c-mbkgq\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.728969 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-scripts\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.736395 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.766510 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.768713 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.769973 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.804671 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdtmj\" (UniqueName: \"kubernetes.io/projected/a771dd48-18c0-4bbf-be8d-41e06f45789e-kube-api-access-pdtmj\") pod \"horizon-6969968ff7-gxm5f\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.824303 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-combined-ca-bundle\") pod \"neutron-db-sync-fnq4z\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.824415 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcrqn\" (UniqueName: \"kubernetes.io/projected/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-kube-api-access-xcrqn\") pod \"neutron-db-sync-fnq4z\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.824463 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-config\") pod \"neutron-db-sync-fnq4z\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.864939 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-865b65b5c7-qjg6b"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.866449 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.905102 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-2nfpj"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.907079 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.913182 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-kj5jm" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.913428 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.929892 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.930822 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-run-httpd\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.930869 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-config-data\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.930899 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-combined-ca-bundle\") pod \"neutron-db-sync-fnq4z\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.930980 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlqlk\" (UniqueName: \"kubernetes.io/projected/03429d0b-f6d7-4b47-8dd9-475bf3c88881-kube-api-access-rlqlk\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.931004 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcrqn\" (UniqueName: \"kubernetes.io/projected/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-kube-api-access-xcrqn\") pod \"neutron-db-sync-fnq4z\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.931049 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-config\") pod \"neutron-db-sync-fnq4z\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.931077 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-log-httpd\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.931095 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-scripts\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.931121 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.936228 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-combined-ca-bundle\") pod \"neutron-db-sync-fnq4z\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.936717 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2nfpj"] Dec 05 11:07:58 crc kubenswrapper[5014]: I1205 11:07:58.941181 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-config\") pod \"neutron-db-sync-fnq4z\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.127647 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcrqn\" (UniqueName: \"kubernetes.io/projected/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-kube-api-access-xcrqn\") pod \"neutron-db-sync-fnq4z\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.136553 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.146176 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-865b65b5c7-qjg6b"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.164216 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.168445 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlqlk\" (UniqueName: \"kubernetes.io/projected/03429d0b-f6d7-4b47-8dd9-475bf3c88881-kube-api-access-rlqlk\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.175617 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-log-httpd\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.175679 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-scripts\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.175737 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.175810 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-db-sync-config-data\") pod \"barbican-db-sync-2nfpj\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") " pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.175878 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-combined-ca-bundle\") pod \"barbican-db-sync-2nfpj\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") " pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.175953 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.176230 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl5zm\" (UniqueName: \"kubernetes.io/projected/7a1f3060-95d4-4b6e-a029-505738f01238-kube-api-access-hl5zm\") pod \"barbican-db-sync-2nfpj\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") " pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.176497 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-run-httpd\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.176574 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-config-data\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.169031 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.182627 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-log-httpd\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.182941 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-run-httpd\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.223701 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-scripts\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.228776 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.229288 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-config-data\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.245158 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.248690 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlqlk\" (UniqueName: \"kubernetes.io/projected/03429d0b-f6d7-4b47-8dd9-475bf3c88881-kube-api-access-rlqlk\") pod \"ceilometer-0\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.287777 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-scripts\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.288047 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-db-sync-config-data\") pod \"barbican-db-sync-2nfpj\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") " 
pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.288173 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-config-data\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.288257 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-combined-ca-bundle\") pod \"barbican-db-sync-2nfpj\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") " pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.288419 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7407954-f41a-48ad-8cda-8c165c4fb5b8-logs\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.288497 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7407954-f41a-48ad-8cda-8c165c4fb5b8-horizon-secret-key\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.288622 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl5zm\" (UniqueName: \"kubernetes.io/projected/7a1f3060-95d4-4b6e-a029-505738f01238-kube-api-access-hl5zm\") pod \"barbican-db-sync-2nfpj\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") " pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.288771 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz2z2\" (UniqueName: \"kubernetes.io/projected/d7407954-f41a-48ad-8cda-8c165c4fb5b8-kube-api-access-zz2z2\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.295320 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-zshwt"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.296915 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.301536 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.301840 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5rgsd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.308789 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-db-sync-config-data\") pod \"barbican-db-sync-2nfpj\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") " pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.314216 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-zshwt"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.315696 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-combined-ca-bundle\") pod \"barbican-db-sync-2nfpj\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") " pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.326031 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.342605 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl5zm\" (UniqueName: \"kubernetes.io/projected/7a1f3060-95d4-4b6e-a029-505738f01238-kube-api-access-hl5zm\") pod \"barbican-db-sync-2nfpj\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") " pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398078 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-scripts\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398134 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36756ede-ab38-444f-8f4a-a07da8173882-etc-machine-id\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398180 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz2z2\" (UniqueName: \"kubernetes.io/projected/d7407954-f41a-48ad-8cda-8c165c4fb5b8-kube-api-access-zz2z2\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398234 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrthj\" (UniqueName: \"kubernetes.io/projected/36756ede-ab38-444f-8f4a-a07da8173882-kube-api-access-xrthj\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398330 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-scripts\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398360 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-combined-ca-bundle\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398400 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-config-data\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398431 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7407954-f41a-48ad-8cda-8c165c4fb5b8-logs\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398452 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7407954-f41a-48ad-8cda-8c165c4fb5b8-horizon-secret-key\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398473 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-db-sync-config-data\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.398507 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-config-data\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.399696 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7407954-f41a-48ad-8cda-8c165c4fb5b8-logs\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.400662 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-config-data\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.405107 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-scripts\") pod \"horizon-865b65b5c7-qjg6b\" (UID: 
\"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.421583 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-5zmlx"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.423032 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7407954-f41a-48ad-8cda-8c165c4fb5b8-horizon-secret-key\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.426033 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-5zmlx"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.426064 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-mbkgq"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.426077 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.426319 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.428780 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-r7jbd"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.432186 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.432538 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.433292 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.436853 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.437072 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ckqjk" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.437095 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.437959 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.443573 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz2z2\" (UniqueName: \"kubernetes.io/projected/d7407954-f41a-48ad-8cda-8c165c4fb5b8-kube-api-access-zz2z2\") pod \"horizon-865b65b5c7-qjg6b\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.445147 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.445198 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.445214 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-r7jbd"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.445331 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.449974 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.450235 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-57g5s" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.450300 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.450384 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.450142 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.500992 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-db-sync-config-data\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.501047 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-config-data\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.501087 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-scripts\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.501112 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36756ede-ab38-444f-8f4a-a07da8173882-etc-machine-id\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.501174 5014 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrthj\" (UniqueName: \"kubernetes.io/projected/36756ede-ab38-444f-8f4a-a07da8173882-kube-api-access-xrthj\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.501245 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-combined-ca-bundle\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.510369 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36756ede-ab38-444f-8f4a-a07da8173882-etc-machine-id\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.513987 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-combined-ca-bundle\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.514780 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-config-data\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.523083 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-scripts\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.528139 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.528694 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-db-sync-config-data\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.555012 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrthj\" (UniqueName: \"kubernetes.io/projected/36756ede-ab38-444f-8f4a-a07da8173882-kube-api-access-xrthj\") pod \"cinder-db-sync-zshwt\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.602788 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-scripts\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.602836 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.602862 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.602885 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.602908 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-combined-ca-bundle\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603106 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603156 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-logs\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " 
pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603179 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603212 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603240 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603256 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603325 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-scripts\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603347 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-logs\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603367 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp54k\" (UniqueName: \"kubernetes.io/projected/483692d0-4e6f-4f30-a62a-842d34670072-kube-api-access-jp54k\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603441 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603485 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod 
\"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603562 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce61806f-b767-42f7-bef7-e11d70d55086-logs\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603610 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdbwt\" (UniqueName: \"kubernetes.io/projected/daf88673-8a2b-4406-b0bf-18134f39333a-kube-api-access-vdbwt\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603637 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-config\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603697 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603762 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5w55\" (UniqueName: \"kubernetes.io/projected/ce61806f-b767-42f7-bef7-e11d70d55086-kube-api-access-c5w55\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603786 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfqz5\" (UniqueName: \"kubernetes.io/projected/57db7104-f74f-4926-80e1-1a55bf255557-kube-api-access-pfqz5\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603884 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-config-data\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.603962 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.604002 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-config-data\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.656342 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-p6nwv"] Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.662027 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-zshwt" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.705838 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.705884 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp54k\" (UniqueName: \"kubernetes.io/projected/483692d0-4e6f-4f30-a62a-842d34670072-kube-api-access-jp54k\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.705915 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.705942 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce61806f-b767-42f7-bef7-e11d70d55086-logs\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.705960 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdbwt\" (UniqueName: \"kubernetes.io/projected/daf88673-8a2b-4406-b0bf-18134f39333a-kube-api-access-vdbwt\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.705978 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-config\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.705999 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706018 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5w55\" (UniqueName: \"kubernetes.io/projected/ce61806f-b767-42f7-bef7-e11d70d55086-kube-api-access-c5w55\") pod \"placement-db-sync-5zmlx\" (UID: 
\"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706035 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfqz5\" (UniqueName: \"kubernetes.io/projected/57db7104-f74f-4926-80e1-1a55bf255557-kube-api-access-pfqz5\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706064 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-config-data\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706089 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706104 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-config-data\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706127 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-scripts\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706147 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706168 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706189 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706213 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-combined-ca-bundle\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc 
kubenswrapper[5014]: I1205 11:07:59.706259 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706294 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-logs\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706312 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706333 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706350 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706366 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706383 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-scripts\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.706398 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-logs\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.707055 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.707446 5014 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce61806f-b767-42f7-bef7-e11d70d55086-logs\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.708829 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-logs\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.709146 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.709176 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-config\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.710262 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.712575 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-scripts\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.711569 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.712960 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.713229 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.713981 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " 
pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.714856 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.716251 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-config-data\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.721869 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-logs\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.725869 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.733394 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-combined-ca-bundle\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.734303 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.739257 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-scripts\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.739474 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.745627 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.752827 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-c5w55\" (UniqueName: \"kubernetes.io/projected/ce61806f-b767-42f7-bef7-e11d70d55086-kube-api-access-c5w55\") pod \"placement-db-sync-5zmlx\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.759149 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-config-data\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.764071 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfqz5\" (UniqueName: \"kubernetes.io/projected/57db7104-f74f-4926-80e1-1a55bf255557-kube-api-access-pfqz5\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.774123 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdbwt\" (UniqueName: \"kubernetes.io/projected/daf88673-8a2b-4406-b0bf-18134f39333a-kube-api-access-vdbwt\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.776233 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp54k\" (UniqueName: \"kubernetes.io/projected/483692d0-4e6f-4f30-a62a-842d34670072-kube-api-access-jp54k\") pod \"dnsmasq-dns-57c957c4ff-r7jbd\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.782449 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5zmlx" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.806529 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " pod="openstack/glance-default-external-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.811891 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:07:59 crc kubenswrapper[5014]: I1205 11:07:59.999263 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.035461 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.082385 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.198909 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" podUID="d3b10718-6fb0-4f19-a960-e96ef12eb543" containerName="dnsmasq-dns" containerID="cri-o://3cc9e3cea9985e7877087773b6e96904f816cf442969a23cb6dd25634600dca8" gracePeriod=10 Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.199218 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" event={"ID":"d3b10718-6fb0-4f19-a960-e96ef12eb543","Type":"ContainerStarted","Data":"3cc9e3cea9985e7877087773b6e96904f816cf442969a23cb6dd25634600dca8"} Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.199378 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.201015 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-p6nwv" event={"ID":"e27b6216-5dd7-4d18-a951-1ecb3de69519","Type":"ContainerStarted","Data":"2ed52e78f59ec714ff6aa6befe5b792abd8671f4ebd7ea191e7bc36240ec9207"} Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.229932 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-mbkgq"] Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.251661 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-fnq4z"] Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.274613 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" podStartSLOduration=4.274572826 podStartE2EDuration="4.274572826s" podCreationTimestamp="2025-12-05 11:07:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:00.239072316 +0000 UTC m=+1207.187190020" watchObservedRunningTime="2025-12-05 11:08:00.274572826 +0000 UTC m=+1207.222690550" Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.598748 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:08:00 crc kubenswrapper[5014]: W1205 11:08:00.607408 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a1f3060_95d4_4b6e_a029_505738f01238.slice/crio-78cca313b32b18450a8c494326deed6065f2f31eff35c60de225b48cd8e1d371 WatchSource:0}: Error finding container 78cca313b32b18450a8c494326deed6065f2f31eff35c60de225b48cd8e1d371: Status 404 returned error can't find the container with id 78cca313b32b18450a8c494326deed6065f2f31eff35c60de225b48cd8e1d371 Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.608801 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2nfpj"] Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.655592 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6969968ff7-gxm5f"] Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.683749 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-865b65b5c7-qjg6b"] Dec 05 11:08:00 crc kubenswrapper[5014]: E1205 11:08:00.708534 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3b10718_6fb0_4f19_a960_e96ef12eb543.slice/crio-conmon-3cc9e3cea9985e7877087773b6e96904f816cf442969a23cb6dd25634600dca8.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.728319 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-5zmlx"] Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.736859 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-zshwt"] Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.829623 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-r7jbd"] Dec 05 11:08:00 crc kubenswrapper[5014]: I1205 11:08:00.922328 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.080889 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.213222 5014 generic.go:334] "Generic (PLEG): container finished" podID="d3b10718-6fb0-4f19-a960-e96ef12eb543" containerID="3cc9e3cea9985e7877087773b6e96904f816cf442969a23cb6dd25634600dca8" exitCode=0 Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.213311 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" event={"ID":"d3b10718-6fb0-4f19-a960-e96ef12eb543","Type":"ContainerDied","Data":"3cc9e3cea9985e7877087773b6e96904f816cf442969a23cb6dd25634600dca8"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.215781 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-zshwt" event={"ID":"36756ede-ab38-444f-8f4a-a07da8173882","Type":"ContainerStarted","Data":"a91de4f6f95b643bc5dd95095ddf181d01ca3ee5d968aef4960766f2fb3b2774"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.217546 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-865b65b5c7-qjg6b" event={"ID":"d7407954-f41a-48ad-8cda-8c165c4fb5b8","Type":"ContainerStarted","Data":"76e2b80b41fdec2764ffd4cfcd5d0e0dac5d097db406fa614cd511b1d5e5dd61"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.222124 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" event={"ID":"fb3011a7-b170-49ed-8d4d-caf19aafaa57","Type":"ContainerStarted","Data":"422ffc72b3af08146ab23229e30025e4843e564c81b181ad0404599fe635997d"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.224476 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03429d0b-f6d7-4b47-8dd9-475bf3c88881","Type":"ContainerStarted","Data":"f0b16bb4b105b2b54b146869aa9feb70e3b593b1a9546e95052fd8adc3382cf1"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.225605 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2nfpj" event={"ID":"7a1f3060-95d4-4b6e-a029-505738f01238","Type":"ContainerStarted","Data":"78cca313b32b18450a8c494326deed6065f2f31eff35c60de225b48cd8e1d371"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.227332 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5zmlx" event={"ID":"ce61806f-b767-42f7-bef7-e11d70d55086","Type":"ContainerStarted","Data":"52854b55d129669789029745b4c62b77fbeb4176574d07139b625198f98bccd5"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.229898 5014 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"daf88673-8a2b-4406-b0bf-18134f39333a","Type":"ContainerStarted","Data":"22ec5d9659b0adf7286d1539aac67a34dac7b01e198a15cadb7816a10565debc"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.231740 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57db7104-f74f-4926-80e1-1a55bf255557","Type":"ContainerStarted","Data":"ffa831f941f4d85356eec94ff1bbfde470e706ce95aad5cf6026882fbcc10bc4"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.236648 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-p6nwv" event={"ID":"e27b6216-5dd7-4d18-a951-1ecb3de69519","Type":"ContainerStarted","Data":"75ce8647ee9947930fccb4bfe2c1284131fbb30df25d1af67604e546a21e60fb"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.239915 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" event={"ID":"483692d0-4e6f-4f30-a62a-842d34670072","Type":"ContainerStarted","Data":"889cabd41495db5735f96197d6ee1d2b8612170be51e02edf1c41c58d2325072"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.241115 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6969968ff7-gxm5f" event={"ID":"a771dd48-18c0-4bbf-be8d-41e06f45789e","Type":"ContainerStarted","Data":"6b33f4e240e390162a22c3cd3ca7f0cf6781ffc5d16e8004af46f82b8d9ae9d9"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.243083 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-fnq4z" event={"ID":"eb060c27-a3ff-4233-9c8f-a5614f4ef60b","Type":"ContainerStarted","Data":"f8f46c752ae3707151ef0059d630531aead2e7fdc5de832aeb5656ddaf24ebe4"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.243106 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-fnq4z" event={"ID":"eb060c27-a3ff-4233-9c8f-a5614f4ef60b","Type":"ContainerStarted","Data":"271271af052d87ec67fb7ae992520ff3acf9536b52fb18e0be05b1561b89df33"} Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.257890 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-p6nwv" podStartSLOduration=3.257864128 podStartE2EDuration="3.257864128s" podCreationTimestamp="2025-12-05 11:07:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:01.254913616 +0000 UTC m=+1208.203031320" watchObservedRunningTime="2025-12-05 11:08:01.257864128 +0000 UTC m=+1208.205981832" Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.911335 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.930918 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6969968ff7-gxm5f"] Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.975514 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-757c4b6dbf-492tq"] Dec 05 11:08:01 crc kubenswrapper[5014]: I1205 11:08:01.977318 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.030610 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.034585 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-config-data\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.034686 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/803d83df-f847-425f-895a-4b1ea26e6868-horizon-secret-key\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.034730 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/803d83df-f847-425f-895a-4b1ea26e6868-logs\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.034764 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-scripts\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.034805 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7nsp\" (UniqueName: \"kubernetes.io/projected/803d83df-f847-425f-895a-4b1ea26e6868-kube-api-access-m7nsp\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.046059 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.104394 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.114796 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-757c4b6dbf-492tq"] Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.143789 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-config\") pod \"d3b10718-6fb0-4f19-a960-e96ef12eb543\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.143945 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-sb\") pod \"d3b10718-6fb0-4f19-a960-e96ef12eb543\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.144021 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-swift-storage-0\") pod \"d3b10718-6fb0-4f19-a960-e96ef12eb543\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.144063 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-nb\") pod \"d3b10718-6fb0-4f19-a960-e96ef12eb543\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.144157 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfk2w\" (UniqueName: \"kubernetes.io/projected/d3b10718-6fb0-4f19-a960-e96ef12eb543-kube-api-access-vfk2w\") pod \"d3b10718-6fb0-4f19-a960-e96ef12eb543\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.144226 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-svc\") pod \"d3b10718-6fb0-4f19-a960-e96ef12eb543\" (UID: \"d3b10718-6fb0-4f19-a960-e96ef12eb543\") " Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.144600 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/803d83df-f847-425f-895a-4b1ea26e6868-horizon-secret-key\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.144658 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/803d83df-f847-425f-895a-4b1ea26e6868-logs\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.144693 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-scripts\") pod \"horizon-757c4b6dbf-492tq\" (UID: 
\"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.144746 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7nsp\" (UniqueName: \"kubernetes.io/projected/803d83df-f847-425f-895a-4b1ea26e6868-kube-api-access-m7nsp\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.144821 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-config-data\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.146393 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-config-data\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.146715 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/803d83df-f847-425f-895a-4b1ea26e6868-logs\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.147426 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-scripts\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.156600 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3b10718-6fb0-4f19-a960-e96ef12eb543-kube-api-access-vfk2w" (OuterVolumeSpecName: "kube-api-access-vfk2w") pod "d3b10718-6fb0-4f19-a960-e96ef12eb543" (UID: "d3b10718-6fb0-4f19-a960-e96ef12eb543"). InnerVolumeSpecName "kube-api-access-vfk2w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.165952 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/803d83df-f847-425f-895a-4b1ea26e6868-horizon-secret-key\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.169620 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7nsp\" (UniqueName: \"kubernetes.io/projected/803d83df-f847-425f-895a-4b1ea26e6868-kube-api-access-m7nsp\") pod \"horizon-757c4b6dbf-492tq\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.247119 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfk2w\" (UniqueName: \"kubernetes.io/projected/d3b10718-6fb0-4f19-a960-e96ef12eb543-kube-api-access-vfk2w\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.251408 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d3b10718-6fb0-4f19-a960-e96ef12eb543" (UID: "d3b10718-6fb0-4f19-a960-e96ef12eb543"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.255005 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d3b10718-6fb0-4f19-a960-e96ef12eb543" (UID: "d3b10718-6fb0-4f19-a960-e96ef12eb543"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.276379 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d3b10718-6fb0-4f19-a960-e96ef12eb543" (UID: "d3b10718-6fb0-4f19-a960-e96ef12eb543"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.280057 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d3b10718-6fb0-4f19-a960-e96ef12eb543" (UID: "d3b10718-6fb0-4f19-a960-e96ef12eb543"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.280963 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-config" (OuterVolumeSpecName: "config") pod "d3b10718-6fb0-4f19-a960-e96ef12eb543" (UID: "d3b10718-6fb0-4f19-a960-e96ef12eb543"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.284527 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57db7104-f74f-4926-80e1-1a55bf255557","Type":"ContainerStarted","Data":"64c697b3923086690589b9e5c2837247386af4e4f8b63604746a01f71a43c35a"} Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.291803 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" event={"ID":"d3b10718-6fb0-4f19-a960-e96ef12eb543","Type":"ContainerDied","Data":"b0e3f2d8ae8e554ec1b14a70cf8983d8c2701074f9fac9b544dbf57a3a74bea4"} Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.291842 5014 scope.go:117] "RemoveContainer" containerID="3cc9e3cea9985e7877087773b6e96904f816cf442969a23cb6dd25634600dca8" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.291985 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-k94w2" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.300643 5014 generic.go:334] "Generic (PLEG): container finished" podID="fb3011a7-b170-49ed-8d4d-caf19aafaa57" containerID="50df0c08cfd65ad39e3d9d9ef661ecea02427caa259d253295fa4e030ec93cc4" exitCode=0 Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.300711 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" event={"ID":"fb3011a7-b170-49ed-8d4d-caf19aafaa57","Type":"ContainerDied","Data":"50df0c08cfd65ad39e3d9d9ef661ecea02427caa259d253295fa4e030ec93cc4"} Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.312876 5014 generic.go:334] "Generic (PLEG): container finished" podID="483692d0-4e6f-4f30-a62a-842d34670072" containerID="569774acfa88baa28e2bebcdcef58e9a75018bec3bed755a48d4e0ab13ac423a" exitCode=0 Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.312940 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" event={"ID":"483692d0-4e6f-4f30-a62a-842d34670072","Type":"ContainerDied","Data":"569774acfa88baa28e2bebcdcef58e9a75018bec3bed755a48d4e0ab13ac423a"} Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.330967 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"daf88673-8a2b-4406-b0bf-18134f39333a","Type":"ContainerStarted","Data":"856e3a0a9a3b56de21b37a48cdcf58aa9ec73f6e75ff405c0f4a4687c0b06944"} Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.350506 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.350538 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.350548 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.350557 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-dns-svc\") on node \"crc\" 
DevicePath \"\"" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.350565 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3b10718-6fb0-4f19-a960-e96ef12eb543-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.381972 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.388228 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-k94w2"] Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.399914 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-k94w2"] Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.403991 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-fnq4z" podStartSLOduration=4.403925325 podStartE2EDuration="4.403925325s" podCreationTimestamp="2025-12-05 11:07:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:02.372549155 +0000 UTC m=+1209.320666869" watchObservedRunningTime="2025-12-05 11:08:02.403925325 +0000 UTC m=+1209.352043059" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.420953 5014 scope.go:117] "RemoveContainer" containerID="4d915d01c3d361c0fc38fa4877c4ed205eed8a3ffaf0d767049c6fb0f6ece0ec" Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.938181 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:08:02 crc kubenswrapper[5014]: I1205 11:08:02.938843 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.004241 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.079305 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-sb\") pod \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.079422 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-nb\") pod \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.079462 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zq9km\" (UniqueName: \"kubernetes.io/projected/fb3011a7-b170-49ed-8d4d-caf19aafaa57-kube-api-access-zq9km\") pod \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.079524 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-swift-storage-0\") pod \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.079577 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-svc\") pod \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.079711 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-config\") pod \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\" (UID: \"fb3011a7-b170-49ed-8d4d-caf19aafaa57\") " Dec 05 11:08:03 crc kubenswrapper[5014]: W1205 11:08:03.094455 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod803d83df_f847_425f_895a_4b1ea26e6868.slice/crio-b027654e3da5ff07fc9103670a6e17536a905eb7cc099ebba5e529394c2da67b WatchSource:0}: Error finding container b027654e3da5ff07fc9103670a6e17536a905eb7cc099ebba5e529394c2da67b: Status 404 returned error can't find the container with id b027654e3da5ff07fc9103670a6e17536a905eb7cc099ebba5e529394c2da67b Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.109005 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb3011a7-b170-49ed-8d4d-caf19aafaa57-kube-api-access-zq9km" (OuterVolumeSpecName: "kube-api-access-zq9km") pod "fb3011a7-b170-49ed-8d4d-caf19aafaa57" (UID: "fb3011a7-b170-49ed-8d4d-caf19aafaa57"). InnerVolumeSpecName "kube-api-access-zq9km". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.121869 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-757c4b6dbf-492tq"] Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.130650 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fb3011a7-b170-49ed-8d4d-caf19aafaa57" (UID: "fb3011a7-b170-49ed-8d4d-caf19aafaa57"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.154107 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fb3011a7-b170-49ed-8d4d-caf19aafaa57" (UID: "fb3011a7-b170-49ed-8d4d-caf19aafaa57"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.156901 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fb3011a7-b170-49ed-8d4d-caf19aafaa57" (UID: "fb3011a7-b170-49ed-8d4d-caf19aafaa57"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.158200 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fb3011a7-b170-49ed-8d4d-caf19aafaa57" (UID: "fb3011a7-b170-49ed-8d4d-caf19aafaa57"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.158920 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-config" (OuterVolumeSpecName: "config") pod "fb3011a7-b170-49ed-8d4d-caf19aafaa57" (UID: "fb3011a7-b170-49ed-8d4d-caf19aafaa57"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.185068 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.185106 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.185118 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zq9km\" (UniqueName: \"kubernetes.io/projected/fb3011a7-b170-49ed-8d4d-caf19aafaa57-kube-api-access-zq9km\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.185132 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.185145 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.185158 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb3011a7-b170-49ed-8d4d-caf19aafaa57-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.337834 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3b10718-6fb0-4f19-a960-e96ef12eb543" path="/var/lib/kubelet/pods/d3b10718-6fb0-4f19-a960-e96ef12eb543/volumes" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.365214 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-757c4b6dbf-492tq" event={"ID":"803d83df-f847-425f-895a-4b1ea26e6868","Type":"ContainerStarted","Data":"b027654e3da5ff07fc9103670a6e17536a905eb7cc099ebba5e529394c2da67b"} Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.369200 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" event={"ID":"fb3011a7-b170-49ed-8d4d-caf19aafaa57","Type":"ContainerDied","Data":"422ffc72b3af08146ab23229e30025e4843e564c81b181ad0404599fe635997d"} Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.369264 5014 scope.go:117] "RemoveContainer" containerID="50df0c08cfd65ad39e3d9d9ef661ecea02427caa259d253295fa4e030ec93cc4" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.369479 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-mbkgq" Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.590444 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-mbkgq"] Dec 05 11:08:03 crc kubenswrapper[5014]: I1205 11:08:03.645124 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-mbkgq"] Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.392031 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" event={"ID":"483692d0-4e6f-4f30-a62a-842d34670072","Type":"ContainerStarted","Data":"30d152ca4d5b905f15a3c91004df94ab14d5d209d3c96c2d3e1789c65ee7102c"} Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.393627 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.404150 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"daf88673-8a2b-4406-b0bf-18134f39333a","Type":"ContainerStarted","Data":"98126259c81017bb32e195ef6647d87e1b5d0076223535fccc2a0c3ca8b28d41"} Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.404357 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="daf88673-8a2b-4406-b0bf-18134f39333a" containerName="glance-log" containerID="cri-o://856e3a0a9a3b56de21b37a48cdcf58aa9ec73f6e75ff405c0f4a4687c0b06944" gracePeriod=30 Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.404460 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="daf88673-8a2b-4406-b0bf-18134f39333a" containerName="glance-httpd" containerID="cri-o://98126259c81017bb32e195ef6647d87e1b5d0076223535fccc2a0c3ca8b28d41" gracePeriod=30 Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.421214 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="57db7104-f74f-4926-80e1-1a55bf255557" containerName="glance-log" containerID="cri-o://64c697b3923086690589b9e5c2837247386af4e4f8b63604746a01f71a43c35a" gracePeriod=30 Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.421432 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57db7104-f74f-4926-80e1-1a55bf255557","Type":"ContainerStarted","Data":"f31c87970abdee5996c12386a4bc1a416df27334af46094def46a27d79167314"} Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.421507 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="57db7104-f74f-4926-80e1-1a55bf255557" containerName="glance-httpd" containerID="cri-o://f31c87970abdee5996c12386a4bc1a416df27334af46094def46a27d79167314" gracePeriod=30 Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.425037 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" podStartSLOduration=5.4250188 podStartE2EDuration="5.4250188s" podCreationTimestamp="2025-12-05 11:07:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:04.422051077 +0000 UTC m=+1211.370168801" watchObservedRunningTime="2025-12-05 11:08:04.4250188 +0000 UTC m=+1211.373136504" Dec 05 11:08:04 crc 
kubenswrapper[5014]: I1205 11:08:04.467136 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.467115881 podStartE2EDuration="5.467115881s" podCreationTimestamp="2025-12-05 11:07:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:04.455688373 +0000 UTC m=+1211.403806087" watchObservedRunningTime="2025-12-05 11:08:04.467115881 +0000 UTC m=+1211.415233585" Dec 05 11:08:04 crc kubenswrapper[5014]: I1205 11:08:04.494713 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.494693509 podStartE2EDuration="5.494693509s" podCreationTimestamp="2025-12-05 11:07:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:04.479859479 +0000 UTC m=+1211.427977204" watchObservedRunningTime="2025-12-05 11:08:04.494693509 +0000 UTC m=+1211.442811213" Dec 05 11:08:05 crc kubenswrapper[5014]: I1205 11:08:05.332450 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb3011a7-b170-49ed-8d4d-caf19aafaa57" path="/var/lib/kubelet/pods/fb3011a7-b170-49ed-8d4d-caf19aafaa57/volumes" Dec 05 11:08:05 crc kubenswrapper[5014]: I1205 11:08:05.443106 5014 generic.go:334] "Generic (PLEG): container finished" podID="daf88673-8a2b-4406-b0bf-18134f39333a" containerID="98126259c81017bb32e195ef6647d87e1b5d0076223535fccc2a0c3ca8b28d41" exitCode=0 Dec 05 11:08:05 crc kubenswrapper[5014]: I1205 11:08:05.443140 5014 generic.go:334] "Generic (PLEG): container finished" podID="daf88673-8a2b-4406-b0bf-18134f39333a" containerID="856e3a0a9a3b56de21b37a48cdcf58aa9ec73f6e75ff405c0f4a4687c0b06944" exitCode=143 Dec 05 11:08:05 crc kubenswrapper[5014]: I1205 11:08:05.443190 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"daf88673-8a2b-4406-b0bf-18134f39333a","Type":"ContainerDied","Data":"98126259c81017bb32e195ef6647d87e1b5d0076223535fccc2a0c3ca8b28d41"} Dec 05 11:08:05 crc kubenswrapper[5014]: I1205 11:08:05.443224 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"daf88673-8a2b-4406-b0bf-18134f39333a","Type":"ContainerDied","Data":"856e3a0a9a3b56de21b37a48cdcf58aa9ec73f6e75ff405c0f4a4687c0b06944"} Dec 05 11:08:05 crc kubenswrapper[5014]: I1205 11:08:05.461626 5014 generic.go:334] "Generic (PLEG): container finished" podID="57db7104-f74f-4926-80e1-1a55bf255557" containerID="f31c87970abdee5996c12386a4bc1a416df27334af46094def46a27d79167314" exitCode=0 Dec 05 11:08:05 crc kubenswrapper[5014]: I1205 11:08:05.461666 5014 generic.go:334] "Generic (PLEG): container finished" podID="57db7104-f74f-4926-80e1-1a55bf255557" containerID="64c697b3923086690589b9e5c2837247386af4e4f8b63604746a01f71a43c35a" exitCode=143 Dec 05 11:08:05 crc kubenswrapper[5014]: I1205 11:08:05.461683 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57db7104-f74f-4926-80e1-1a55bf255557","Type":"ContainerDied","Data":"f31c87970abdee5996c12386a4bc1a416df27334af46094def46a27d79167314"} Dec 05 11:08:05 crc kubenswrapper[5014]: I1205 11:08:05.461767 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"57db7104-f74f-4926-80e1-1a55bf255557","Type":"ContainerDied","Data":"64c697b3923086690589b9e5c2837247386af4e4f8b63604746a01f71a43c35a"} Dec 05 11:08:06 crc kubenswrapper[5014]: I1205 11:08:06.479304 5014 generic.go:334] "Generic (PLEG): container finished" podID="e27b6216-5dd7-4d18-a951-1ecb3de69519" containerID="75ce8647ee9947930fccb4bfe2c1284131fbb30df25d1af67604e546a21e60fb" exitCode=0 Dec 05 11:08:06 crc kubenswrapper[5014]: I1205 11:08:06.481471 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-p6nwv" event={"ID":"e27b6216-5dd7-4d18-a951-1ecb3de69519","Type":"ContainerDied","Data":"75ce8647ee9947930fccb4bfe2c1284131fbb30df25d1af67604e546a21e60fb"} Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.688668 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.863435 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-scripts\") pod \"57db7104-f74f-4926-80e1-1a55bf255557\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.863553 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-httpd-run\") pod \"57db7104-f74f-4926-80e1-1a55bf255557\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.863591 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfqz5\" (UniqueName: \"kubernetes.io/projected/57db7104-f74f-4926-80e1-1a55bf255557-kube-api-access-pfqz5\") pod \"57db7104-f74f-4926-80e1-1a55bf255557\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.863659 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"57db7104-f74f-4926-80e1-1a55bf255557\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.863724 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-combined-ca-bundle\") pod \"57db7104-f74f-4926-80e1-1a55bf255557\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.863761 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-logs\") pod \"57db7104-f74f-4926-80e1-1a55bf255557\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.863813 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-config-data\") pod \"57db7104-f74f-4926-80e1-1a55bf255557\" (UID: \"57db7104-f74f-4926-80e1-1a55bf255557\") " Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.864591 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-httpd-run" (OuterVolumeSpecName: 
"httpd-run") pod "57db7104-f74f-4926-80e1-1a55bf255557" (UID: "57db7104-f74f-4926-80e1-1a55bf255557"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.864701 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-logs" (OuterVolumeSpecName: "logs") pod "57db7104-f74f-4926-80e1-1a55bf255557" (UID: "57db7104-f74f-4926-80e1-1a55bf255557"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.870995 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "57db7104-f74f-4926-80e1-1a55bf255557" (UID: "57db7104-f74f-4926-80e1-1a55bf255557"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.872823 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-scripts" (OuterVolumeSpecName: "scripts") pod "57db7104-f74f-4926-80e1-1a55bf255557" (UID: "57db7104-f74f-4926-80e1-1a55bf255557"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.915063 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57db7104-f74f-4926-80e1-1a55bf255557-kube-api-access-pfqz5" (OuterVolumeSpecName: "kube-api-access-pfqz5") pod "57db7104-f74f-4926-80e1-1a55bf255557" (UID: "57db7104-f74f-4926-80e1-1a55bf255557"). InnerVolumeSpecName "kube-api-access-pfqz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.917317 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57db7104-f74f-4926-80e1-1a55bf255557" (UID: "57db7104-f74f-4926-80e1-1a55bf255557"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.927791 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-config-data" (OuterVolumeSpecName: "config-data") pod "57db7104-f74f-4926-80e1-1a55bf255557" (UID: "57db7104-f74f-4926-80e1-1a55bf255557"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.965738 5014 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.965773 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.965785 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.965793 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.965805 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57db7104-f74f-4926-80e1-1a55bf255557-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.965814 5014 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57db7104-f74f-4926-80e1-1a55bf255557-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:07 crc kubenswrapper[5014]: I1205 11:08:07.965825 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfqz5\" (UniqueName: \"kubernetes.io/projected/57db7104-f74f-4926-80e1-1a55bf255557-kube-api-access-pfqz5\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.004457 5014 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.069715 5014 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.495717 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57db7104-f74f-4926-80e1-1a55bf255557","Type":"ContainerDied","Data":"ffa831f941f4d85356eec94ff1bbfde470e706ce95aad5cf6026882fbcc10bc4"} Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.495800 5014 scope.go:117] "RemoveContainer" containerID="f31c87970abdee5996c12386a4bc1a416df27334af46094def46a27d79167314" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.496164 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.538660 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.548585 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.564749 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:08 crc kubenswrapper[5014]: E1205 11:08:08.565651 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3011a7-b170-49ed-8d4d-caf19aafaa57" containerName="init" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.565674 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3011a7-b170-49ed-8d4d-caf19aafaa57" containerName="init" Dec 05 11:08:08 crc kubenswrapper[5014]: E1205 11:08:08.565711 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3b10718-6fb0-4f19-a960-e96ef12eb543" containerName="init" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.565719 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3b10718-6fb0-4f19-a960-e96ef12eb543" containerName="init" Dec 05 11:08:08 crc kubenswrapper[5014]: E1205 11:08:08.565732 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57db7104-f74f-4926-80e1-1a55bf255557" containerName="glance-log" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.565739 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="57db7104-f74f-4926-80e1-1a55bf255557" containerName="glance-log" Dec 05 11:08:08 crc kubenswrapper[5014]: E1205 11:08:08.565746 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57db7104-f74f-4926-80e1-1a55bf255557" containerName="glance-httpd" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.565751 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="57db7104-f74f-4926-80e1-1a55bf255557" containerName="glance-httpd" Dec 05 11:08:08 crc kubenswrapper[5014]: E1205 11:08:08.565764 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3b10718-6fb0-4f19-a960-e96ef12eb543" containerName="dnsmasq-dns" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.565771 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3b10718-6fb0-4f19-a960-e96ef12eb543" containerName="dnsmasq-dns" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.565937 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="57db7104-f74f-4926-80e1-1a55bf255557" containerName="glance-log" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.565952 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3b10718-6fb0-4f19-a960-e96ef12eb543" containerName="dnsmasq-dns" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.565967 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="57db7104-f74f-4926-80e1-1a55bf255557" containerName="glance-httpd" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.565978 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb3011a7-b170-49ed-8d4d-caf19aafaa57" containerName="init" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.567008 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.569872 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.580656 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.588236 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.682333 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.682421 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-scripts\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.682629 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.682733 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knvpr\" (UniqueName: \"kubernetes.io/projected/70fdbc17-8002-4e0e-82fd-854c962ad0e9-kube-api-access-knvpr\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.682807 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-config-data\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.682838 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-logs\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.682899 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.682982 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.785158 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.785598 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.785631 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-scripts\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.785720 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.785949 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knvpr\" (UniqueName: \"kubernetes.io/projected/70fdbc17-8002-4e0e-82fd-854c962ad0e9-kube-api-access-knvpr\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.786430 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-config-data\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.786455 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-logs\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.786505 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.786654 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.786729 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.786968 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-logs\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.792263 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-scripts\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.792351 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.803908 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-config-data\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.808718 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knvpr\" (UniqueName: \"kubernetes.io/projected/70fdbc17-8002-4e0e-82fd-854c962ad0e9-kube-api-access-knvpr\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.809122 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:08 crc kubenswrapper[5014]: I1205 11:08:08.815123 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:09 crc kubenswrapper[5014]: I1205 11:08:09.060910 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:08:09 crc kubenswrapper[5014]: I1205 11:08:09.339753 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57db7104-f74f-4926-80e1-1a55bf255557" path="/var/lib/kubelet/pods/57db7104-f74f-4926-80e1-1a55bf255557/volumes" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.039016 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.118923 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-zjmn4"] Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.119195 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerName="dnsmasq-dns" containerID="cri-o://ca45bc629a54bb9822178789a6eaa7dbd592e9a1e24ba9bc6b7292973ca89575" gracePeriod=10 Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.217238 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-865b65b5c7-qjg6b"] Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.244759 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.253666 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5dd6878f44-n5k2l"] Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.255792 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.259653 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.305497 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-757c4b6dbf-492tq"] Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.357415 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-combined-ca-bundle\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.368784 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-tls-certs\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.368859 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-secret-key\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.368880 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhqlp\" (UniqueName: \"kubernetes.io/projected/b3d0ec93-b994-4bc6-9a86-7085e79c7208-kube-api-access-fhqlp\") pod \"horizon-5dd6878f44-n5k2l\" (UID: 
\"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.368949 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-config-data\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.369218 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-scripts\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.369322 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3d0ec93-b994-4bc6-9a86-7085e79c7208-logs\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.372528 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5dd6878f44-n5k2l"] Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.437152 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-575d445b9b-l7wlc"] Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.440471 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.467237 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-575d445b9b-l7wlc"] Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.474113 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-scripts\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.474185 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3d0ec93-b994-4bc6-9a86-7085e79c7208-logs\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.474372 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-combined-ca-bundle\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.474466 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-tls-certs\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.474513 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-secret-key\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.474550 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhqlp\" (UniqueName: \"kubernetes.io/projected/b3d0ec93-b994-4bc6-9a86-7085e79c7208-kube-api-access-fhqlp\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.474599 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-config-data\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.481754 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-scripts\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.481995 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3d0ec93-b994-4bc6-9a86-7085e79c7208-logs\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.485092 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-config-data\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.488874 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-tls-certs\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.491435 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-combined-ca-bundle\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.519066 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhqlp\" (UniqueName: \"kubernetes.io/projected/b3d0ec93-b994-4bc6-9a86-7085e79c7208-kube-api-access-fhqlp\") pod \"horizon-5dd6878f44-n5k2l\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.521981 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-secret-key\") pod \"horizon-5dd6878f44-n5k2l\" (UID: 
\"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.584727 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b5b07bd8-c674-4647-a09b-eae67ddad491-horizon-secret-key\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.584787 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b5b07bd8-c674-4647-a09b-eae67ddad491-config-data\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.584834 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2f9c\" (UniqueName: \"kubernetes.io/projected/b5b07bd8-c674-4647-a09b-eae67ddad491-kube-api-access-h2f9c\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.584875 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b07bd8-c674-4647-a09b-eae67ddad491-logs\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.585037 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5b07bd8-c674-4647-a09b-eae67ddad491-horizon-tls-certs\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.585222 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5b07bd8-c674-4647-a09b-eae67ddad491-scripts\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.585244 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b07bd8-c674-4647-a09b-eae67ddad491-combined-ca-bundle\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.616582 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.686994 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5b07bd8-c674-4647-a09b-eae67ddad491-scripts\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.687050 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b07bd8-c674-4647-a09b-eae67ddad491-combined-ca-bundle\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.687123 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b5b07bd8-c674-4647-a09b-eae67ddad491-horizon-secret-key\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.687143 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b5b07bd8-c674-4647-a09b-eae67ddad491-config-data\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.687169 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2f9c\" (UniqueName: \"kubernetes.io/projected/b5b07bd8-c674-4647-a09b-eae67ddad491-kube-api-access-h2f9c\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.687188 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b07bd8-c674-4647-a09b-eae67ddad491-logs\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.687248 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5b07bd8-c674-4647-a09b-eae67ddad491-horizon-tls-certs\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.688009 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b07bd8-c674-4647-a09b-eae67ddad491-logs\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.689553 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5b07bd8-c674-4647-a09b-eae67ddad491-scripts\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.691474 5014 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b5b07bd8-c674-4647-a09b-eae67ddad491-config-data\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.696485 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b07bd8-c674-4647-a09b-eae67ddad491-combined-ca-bundle\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.699910 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b5b07bd8-c674-4647-a09b-eae67ddad491-horizon-secret-key\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.700424 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5b07bd8-c674-4647-a09b-eae67ddad491-horizon-tls-certs\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.709785 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2f9c\" (UniqueName: \"kubernetes.io/projected/b5b07bd8-c674-4647-a09b-eae67ddad491-kube-api-access-h2f9c\") pod \"horizon-575d445b9b-l7wlc\" (UID: \"b5b07bd8-c674-4647-a09b-eae67ddad491\") " pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:10 crc kubenswrapper[5014]: I1205 11:08:10.776575 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:11 crc kubenswrapper[5014]: E1205 11:08:11.003455 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8eb8cd04_c5a2_4a43_8648_80e74478ec75.slice/crio-conmon-ca45bc629a54bb9822178789a6eaa7dbd592e9a1e24ba9bc6b7292973ca89575.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:08:11 crc kubenswrapper[5014]: I1205 11:08:11.554591 5014 generic.go:334] "Generic (PLEG): container finished" podID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerID="ca45bc629a54bb9822178789a6eaa7dbd592e9a1e24ba9bc6b7292973ca89575" exitCode=0 Dec 05 11:08:11 crc kubenswrapper[5014]: I1205 11:08:11.554610 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" event={"ID":"8eb8cd04-c5a2-4a43-8648-80e74478ec75","Type":"ContainerDied","Data":"ca45bc629a54bb9822178789a6eaa7dbd592e9a1e24ba9bc6b7292973ca89575"} Dec 05 11:08:13 crc kubenswrapper[5014]: I1205 11:08:13.202528 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: connect: connection refused" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.798297 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.819707 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.981335 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-config-data\") pod \"e27b6216-5dd7-4d18-a951-1ecb3de69519\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.981383 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-combined-ca-bundle\") pod \"e27b6216-5dd7-4d18-a951-1ecb3de69519\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.981415 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"daf88673-8a2b-4406-b0bf-18134f39333a\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.981439 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-config-data\") pod \"daf88673-8a2b-4406-b0bf-18134f39333a\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.981518 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdbwt\" (UniqueName: \"kubernetes.io/projected/daf88673-8a2b-4406-b0bf-18134f39333a-kube-api-access-vdbwt\") pod \"daf88673-8a2b-4406-b0bf-18134f39333a\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.981552 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-httpd-run\") pod \"daf88673-8a2b-4406-b0bf-18134f39333a\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.981591 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-fernet-keys\") pod \"e27b6216-5dd7-4d18-a951-1ecb3de69519\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.981624 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-scripts\") pod \"daf88673-8a2b-4406-b0bf-18134f39333a\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.982195 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "daf88673-8a2b-4406-b0bf-18134f39333a" (UID: "daf88673-8a2b-4406-b0bf-18134f39333a"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.982763 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-combined-ca-bundle\") pod \"daf88673-8a2b-4406-b0bf-18134f39333a\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.982797 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-credential-keys\") pod \"e27b6216-5dd7-4d18-a951-1ecb3de69519\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.982853 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-scripts\") pod \"e27b6216-5dd7-4d18-a951-1ecb3de69519\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.982898 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqtjl\" (UniqueName: \"kubernetes.io/projected/e27b6216-5dd7-4d18-a951-1ecb3de69519-kube-api-access-tqtjl\") pod \"e27b6216-5dd7-4d18-a951-1ecb3de69519\" (UID: \"e27b6216-5dd7-4d18-a951-1ecb3de69519\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.982980 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-logs\") pod \"daf88673-8a2b-4406-b0bf-18134f39333a\" (UID: \"daf88673-8a2b-4406-b0bf-18134f39333a\") " Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.984003 5014 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.984786 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-logs" (OuterVolumeSpecName: "logs") pod "daf88673-8a2b-4406-b0bf-18134f39333a" (UID: "daf88673-8a2b-4406-b0bf-18134f39333a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.989433 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-scripts" (OuterVolumeSpecName: "scripts") pod "e27b6216-5dd7-4d18-a951-1ecb3de69519" (UID: "e27b6216-5dd7-4d18-a951-1ecb3de69519"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.989531 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daf88673-8a2b-4406-b0bf-18134f39333a-kube-api-access-vdbwt" (OuterVolumeSpecName: "kube-api-access-vdbwt") pod "daf88673-8a2b-4406-b0bf-18134f39333a" (UID: "daf88673-8a2b-4406-b0bf-18134f39333a"). InnerVolumeSpecName "kube-api-access-vdbwt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.989592 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e27b6216-5dd7-4d18-a951-1ecb3de69519-kube-api-access-tqtjl" (OuterVolumeSpecName: "kube-api-access-tqtjl") pod "e27b6216-5dd7-4d18-a951-1ecb3de69519" (UID: "e27b6216-5dd7-4d18-a951-1ecb3de69519"). InnerVolumeSpecName "kube-api-access-tqtjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.991859 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "daf88673-8a2b-4406-b0bf-18134f39333a" (UID: "daf88673-8a2b-4406-b0bf-18134f39333a"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.992046 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e27b6216-5dd7-4d18-a951-1ecb3de69519" (UID: "e27b6216-5dd7-4d18-a951-1ecb3de69519"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:14 crc kubenswrapper[5014]: I1205 11:08:14.998484 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-scripts" (OuterVolumeSpecName: "scripts") pod "daf88673-8a2b-4406-b0bf-18134f39333a" (UID: "daf88673-8a2b-4406-b0bf-18134f39333a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.001295 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e27b6216-5dd7-4d18-a951-1ecb3de69519" (UID: "e27b6216-5dd7-4d18-a951-1ecb3de69519"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.021825 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e27b6216-5dd7-4d18-a951-1ecb3de69519" (UID: "e27b6216-5dd7-4d18-a951-1ecb3de69519"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.022489 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-config-data" (OuterVolumeSpecName: "config-data") pod "e27b6216-5dd7-4d18-a951-1ecb3de69519" (UID: "e27b6216-5dd7-4d18-a951-1ecb3de69519"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.025484 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "daf88673-8a2b-4406-b0bf-18134f39333a" (UID: "daf88673-8a2b-4406-b0bf-18134f39333a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.054023 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-config-data" (OuterVolumeSpecName: "config-data") pod "daf88673-8a2b-4406-b0bf-18134f39333a" (UID: "daf88673-8a2b-4406-b0bf-18134f39333a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085349 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdbwt\" (UniqueName: \"kubernetes.io/projected/daf88673-8a2b-4406-b0bf-18134f39333a-kube-api-access-vdbwt\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085390 5014 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085403 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085414 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085426 5014 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085436 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085448 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqtjl\" (UniqueName: \"kubernetes.io/projected/e27b6216-5dd7-4d18-a951-1ecb3de69519-kube-api-access-tqtjl\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085459 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daf88673-8a2b-4406-b0bf-18134f39333a-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085469 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085479 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27b6216-5dd7-4d18-a951-1ecb3de69519-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085517 5014 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.085529 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/daf88673-8a2b-4406-b0bf-18134f39333a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.111349 5014 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.186942 5014 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.590399 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-p6nwv" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.590394 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-p6nwv" event={"ID":"e27b6216-5dd7-4d18-a951-1ecb3de69519","Type":"ContainerDied","Data":"2ed52e78f59ec714ff6aa6befe5b792abd8671f4ebd7ea191e7bc36240ec9207"} Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.590515 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ed52e78f59ec714ff6aa6befe5b792abd8671f4ebd7ea191e7bc36240ec9207" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.598546 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"daf88673-8a2b-4406-b0bf-18134f39333a","Type":"ContainerDied","Data":"22ec5d9659b0adf7286d1539aac67a34dac7b01e198a15cadb7816a10565debc"} Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.598628 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.629175 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.639020 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.652080 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:08:15 crc kubenswrapper[5014]: E1205 11:08:15.654553 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e27b6216-5dd7-4d18-a951-1ecb3de69519" containerName="keystone-bootstrap" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.654577 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e27b6216-5dd7-4d18-a951-1ecb3de69519" containerName="keystone-bootstrap" Dec 05 11:08:15 crc kubenswrapper[5014]: E1205 11:08:15.654604 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daf88673-8a2b-4406-b0bf-18134f39333a" containerName="glance-log" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.654613 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="daf88673-8a2b-4406-b0bf-18134f39333a" containerName="glance-log" Dec 05 11:08:15 crc kubenswrapper[5014]: E1205 11:08:15.654636 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daf88673-8a2b-4406-b0bf-18134f39333a" containerName="glance-httpd" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.654643 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="daf88673-8a2b-4406-b0bf-18134f39333a" containerName="glance-httpd" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.654856 5014 
memory_manager.go:354] "RemoveStaleState removing state" podUID="e27b6216-5dd7-4d18-a951-1ecb3de69519" containerName="keystone-bootstrap" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.654881 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="daf88673-8a2b-4406-b0bf-18134f39333a" containerName="glance-httpd" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.654896 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="daf88673-8a2b-4406-b0bf-18134f39333a" containerName="glance-log" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.655968 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.657745 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.661044 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.666702 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.797974 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-logs\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.798029 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.798069 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.798815 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.798898 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.798993 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssmph\" (UniqueName: \"kubernetes.io/projected/0ee1ae65-148c-40e6-afc5-8526bade7971-kube-api-access-ssmph\") pod 
\"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.799075 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.799237 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.900503 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.900783 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.900839 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.900856 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.900883 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssmph\" (UniqueName: \"kubernetes.io/projected/0ee1ae65-148c-40e6-afc5-8526bade7971-kube-api-access-ssmph\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.900904 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.901241 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-httpd-run\") pod \"glance-default-internal-api-0\" 
(UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.901048 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.901339 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-logs\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.901683 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.901983 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-logs\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.902514 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-p6nwv"] Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.906156 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.907397 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.911563 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.912946 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-p6nwv"] Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.918850 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.923615 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ssmph\" (UniqueName: \"kubernetes.io/projected/0ee1ae65-148c-40e6-afc5-8526bade7971-kube-api-access-ssmph\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.929947 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:08:15 crc kubenswrapper[5014]: I1205 11:08:15.982043 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.007057 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-cw4lt"] Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.008336 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.012690 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.012947 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.013078 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.013191 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s452q" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.013933 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.027489 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cw4lt"] Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.104406 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-credential-keys\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.104763 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-fernet-keys\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.104876 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-combined-ca-bundle\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.104944 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-scripts\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.104996 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67wrd\" (UniqueName: \"kubernetes.io/projected/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-kube-api-access-67wrd\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.105169 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-config-data\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.207373 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-fernet-keys\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.207455 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-combined-ca-bundle\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.207494 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-scripts\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.207532 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67wrd\" (UniqueName: \"kubernetes.io/projected/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-kube-api-access-67wrd\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.207567 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-config-data\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.207661 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-credential-keys\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.211241 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-fernet-keys\") pod \"keystone-bootstrap-cw4lt\" (UID: 
\"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.213284 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-credential-keys\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.214348 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-scripts\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.221416 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-combined-ca-bundle\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.224225 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67wrd\" (UniqueName: \"kubernetes.io/projected/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-kube-api-access-67wrd\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.225663 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-config-data\") pod \"keystone-bootstrap-cw4lt\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:16 crc kubenswrapper[5014]: I1205 11:08:16.338491 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:17 crc kubenswrapper[5014]: I1205 11:08:17.332687 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daf88673-8a2b-4406-b0bf-18134f39333a" path="/var/lib/kubelet/pods/daf88673-8a2b-4406-b0bf-18134f39333a/volumes" Dec 05 11:08:17 crc kubenswrapper[5014]: I1205 11:08:17.334388 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e27b6216-5dd7-4d18-a951-1ecb3de69519" path="/var/lib/kubelet/pods/e27b6216-5dd7-4d18-a951-1ecb3de69519/volumes" Dec 05 11:08:18 crc kubenswrapper[5014]: I1205 11:08:18.202112 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: connect: connection refused" Dec 05 11:08:18 crc kubenswrapper[5014]: E1205 11:08:18.678175 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 05 11:08:18 crc kubenswrapper[5014]: E1205 11:08:18.678354 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hl5zm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-2nfpj_openstack(7a1f3060-95d4-4b6e-a029-505738f01238): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:08:18 crc kubenswrapper[5014]: E1205 11:08:18.679718 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-2nfpj" podUID="7a1f3060-95d4-4b6e-a029-505738f01238" Dec 05 11:08:18 crc kubenswrapper[5014]: E1205 11:08:18.694800 5014 log.go:32] 
"PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 11:08:18 crc kubenswrapper[5014]: E1205 11:08:18.694995 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n546h65bh5ffh574h77h5cfhc9h678h559h75h66ch6bh544h66bh68ch5ffh584h66bh5b6h66h587h8ch7ch685h678h645h598h65fh646h58fh578h664q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pdtmj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6969968ff7-gxm5f_openstack(a771dd48-18c0-4bbf-be8d-41e06f45789e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:08:18 crc kubenswrapper[5014]: E1205 11:08:18.698984 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6969968ff7-gxm5f" podUID="a771dd48-18c0-4bbf-be8d-41e06f45789e" Dec 05 11:08:19 crc kubenswrapper[5014]: E1205 11:08:19.635098 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-2nfpj" podUID="7a1f3060-95d4-4b6e-a029-505738f01238" Dec 05 11:08:28 crc kubenswrapper[5014]: I1205 11:08:28.202353 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" 
containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Dec 05 11:08:28 crc kubenswrapper[5014]: I1205 11:08:28.203343 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.585847 5014 scope.go:117] "RemoveContainer" containerID="64c697b3923086690589b9e5c2837247386af4e4f8b63604746a01f71a43c35a" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.726470 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6969968ff7-gxm5f" event={"ID":"a771dd48-18c0-4bbf-be8d-41e06f45789e","Type":"ContainerDied","Data":"6b33f4e240e390162a22c3cd3ca7f0cf6781ffc5d16e8004af46f82b8d9ae9d9"} Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.726835 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b33f4e240e390162a22c3cd3ca7f0cf6781ffc5d16e8004af46f82b8d9ae9d9" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.731258 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" event={"ID":"8eb8cd04-c5a2-4a43-8648-80e74478ec75","Type":"ContainerDied","Data":"13bcf29c66eb5e9bf7546f4326cc7dce83b29b8c6e0d5d2107645fd0c68f696a"} Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.731337 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13bcf29c66eb5e9bf7546f4326cc7dce83b29b8c6e0d5d2107645fd0c68f696a" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.762169 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.769507 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.873883 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-scripts\") pod \"a771dd48-18c0-4bbf-be8d-41e06f45789e\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.874745 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-scripts" (OuterVolumeSpecName: "scripts") pod "a771dd48-18c0-4bbf-be8d-41e06f45789e" (UID: "a771dd48-18c0-4bbf-be8d-41e06f45789e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.874813 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-config-data\") pod \"a771dd48-18c0-4bbf-be8d-41e06f45789e\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.875441 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-config-data" (OuterVolumeSpecName: "config-data") pod "a771dd48-18c0-4bbf-be8d-41e06f45789e" (UID: "a771dd48-18c0-4bbf-be8d-41e06f45789e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.875487 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g8zb\" (UniqueName: \"kubernetes.io/projected/8eb8cd04-c5a2-4a43-8648-80e74478ec75-kube-api-access-5g8zb\") pod \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.875905 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-nb\") pod \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.875977 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-svc\") pod \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.876107 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdtmj\" (UniqueName: \"kubernetes.io/projected/a771dd48-18c0-4bbf-be8d-41e06f45789e-kube-api-access-pdtmj\") pod \"a771dd48-18c0-4bbf-be8d-41e06f45789e\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.876131 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-config\") pod \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.876153 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a771dd48-18c0-4bbf-be8d-41e06f45789e-horizon-secret-key\") pod \"a771dd48-18c0-4bbf-be8d-41e06f45789e\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.876191 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a771dd48-18c0-4bbf-be8d-41e06f45789e-logs\") pod \"a771dd48-18c0-4bbf-be8d-41e06f45789e\" (UID: \"a771dd48-18c0-4bbf-be8d-41e06f45789e\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.876220 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-sb\") pod \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.876239 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-swift-storage-0\") pod \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\" (UID: \"8eb8cd04-c5a2-4a43-8648-80e74478ec75\") " Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.876599 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a771dd48-18c0-4bbf-be8d-41e06f45789e-logs" (OuterVolumeSpecName: "logs") pod "a771dd48-18c0-4bbf-be8d-41e06f45789e" (UID: 
"a771dd48-18c0-4bbf-be8d-41e06f45789e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.877519 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a771dd48-18c0-4bbf-be8d-41e06f45789e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.877575 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.877628 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a771dd48-18c0-4bbf-be8d-41e06f45789e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.878830 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eb8cd04-c5a2-4a43-8648-80e74478ec75-kube-api-access-5g8zb" (OuterVolumeSpecName: "kube-api-access-5g8zb") pod "8eb8cd04-c5a2-4a43-8648-80e74478ec75" (UID: "8eb8cd04-c5a2-4a43-8648-80e74478ec75"). InnerVolumeSpecName "kube-api-access-5g8zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.881147 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a771dd48-18c0-4bbf-be8d-41e06f45789e-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a771dd48-18c0-4bbf-be8d-41e06f45789e" (UID: "a771dd48-18c0-4bbf-be8d-41e06f45789e"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.900249 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a771dd48-18c0-4bbf-be8d-41e06f45789e-kube-api-access-pdtmj" (OuterVolumeSpecName: "kube-api-access-pdtmj") pod "a771dd48-18c0-4bbf-be8d-41e06f45789e" (UID: "a771dd48-18c0-4bbf-be8d-41e06f45789e"). InnerVolumeSpecName "kube-api-access-pdtmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.922831 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8eb8cd04-c5a2-4a43-8648-80e74478ec75" (UID: "8eb8cd04-c5a2-4a43-8648-80e74478ec75"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.927483 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8eb8cd04-c5a2-4a43-8648-80e74478ec75" (UID: "8eb8cd04-c5a2-4a43-8648-80e74478ec75"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.928558 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8eb8cd04-c5a2-4a43-8648-80e74478ec75" (UID: "8eb8cd04-c5a2-4a43-8648-80e74478ec75"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.929489 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-config" (OuterVolumeSpecName: "config") pod "8eb8cd04-c5a2-4a43-8648-80e74478ec75" (UID: "8eb8cd04-c5a2-4a43-8648-80e74478ec75"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.946761 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8eb8cd04-c5a2-4a43-8648-80e74478ec75" (UID: "8eb8cd04-c5a2-4a43-8648-80e74478ec75"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.979318 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g8zb\" (UniqueName: \"kubernetes.io/projected/8eb8cd04-c5a2-4a43-8648-80e74478ec75-kube-api-access-5g8zb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.979355 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.979368 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.979379 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdtmj\" (UniqueName: \"kubernetes.io/projected/a771dd48-18c0-4bbf-be8d-41e06f45789e-kube-api-access-pdtmj\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.979389 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.979399 5014 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a771dd48-18c0-4bbf-be8d-41e06f45789e-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.979409 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:29 crc kubenswrapper[5014]: I1205 11:08:29.979420 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8eb8cd04-c5a2-4a43-8648-80e74478ec75-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:30 crc kubenswrapper[5014]: E1205 11:08:30.266202 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Dec 05 11:08:30 crc kubenswrapper[5014]: E1205 11:08:30.267047 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5c6h57h5ffh67h5cdhf7hf9h5b7h59fhfbh675h58bhdfh58chdh5b6h659hf8h5bfh654h67bh6ch65fh557h5cbh8dh5b6h675h64dhffhbch645q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rlqlk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(03429d0b-f6d7-4b47-8dd9-475bf3c88881): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:08:30 crc kubenswrapper[5014]: I1205 11:08:30.740809 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6969968ff7-gxm5f" Dec 05 11:08:30 crc kubenswrapper[5014]: I1205 11:08:30.740878 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" Dec 05 11:08:30 crc kubenswrapper[5014]: I1205 11:08:30.816436 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6969968ff7-gxm5f"] Dec 05 11:08:30 crc kubenswrapper[5014]: I1205 11:08:30.822683 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6969968ff7-gxm5f"] Dec 05 11:08:30 crc kubenswrapper[5014]: I1205 11:08:30.841183 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-zjmn4"] Dec 05 11:08:30 crc kubenswrapper[5014]: I1205 11:08:30.852037 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-zjmn4"] Dec 05 11:08:31 crc kubenswrapper[5014]: I1205 11:08:31.329903 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" path="/var/lib/kubelet/pods/8eb8cd04-c5a2-4a43-8648-80e74478ec75/volumes" Dec 05 11:08:31 crc kubenswrapper[5014]: I1205 11:08:31.330925 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a771dd48-18c0-4bbf-be8d-41e06f45789e" path="/var/lib/kubelet/pods/a771dd48-18c0-4bbf-be8d-41e06f45789e/volumes" Dec 05 11:08:31 crc kubenswrapper[5014]: I1205 11:08:31.638035 5014 scope.go:117] "RemoveContainer" containerID="98126259c81017bb32e195ef6647d87e1b5d0076223535fccc2a0c3ca8b28d41" Dec 05 11:08:31 crc kubenswrapper[5014]: E1205 11:08:31.649036 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 05 11:08:31 crc kubenswrapper[5014]: E1205 11:08:31.649247 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xrthj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-zshwt_openstack(36756ede-ab38-444f-8f4a-a07da8173882): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 11:08:31 crc kubenswrapper[5014]: E1205 11:08:31.650815 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-zshwt" podUID="36756ede-ab38-444f-8f4a-a07da8173882" Dec 05 11:08:31 crc kubenswrapper[5014]: E1205 11:08:31.783009 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-zshwt" podUID="36756ede-ab38-444f-8f4a-a07da8173882" Dec 05 11:08:31 crc kubenswrapper[5014]: I1205 11:08:31.784249 5014 scope.go:117] "RemoveContainer" containerID="856e3a0a9a3b56de21b37a48cdcf58aa9ec73f6e75ff405c0f4a4687c0b06944" Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.130683 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5dd6878f44-n5k2l"] Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.296916 5014 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cw4lt"] Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.306030 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-575d445b9b-l7wlc"] Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.343480 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:32 crc kubenswrapper[5014]: W1205 11:08:32.420731 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3d0ec93_b994_4bc6_9a86_7085e79c7208.slice/crio-eb140c30daf7078f9b6d33c7b5180eab8b8fd4acc86994759e77bfca772df80a WatchSource:0}: Error finding container eb140c30daf7078f9b6d33c7b5180eab8b8fd4acc86994759e77bfca772df80a: Status 404 returned error can't find the container with id eb140c30daf7078f9b6d33c7b5180eab8b8fd4acc86994759e77bfca772df80a Dec 05 11:08:32 crc kubenswrapper[5014]: W1205 11:08:32.422904 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70fdbc17_8002_4e0e_82fd_854c962ad0e9.slice/crio-6764909b596f9c5a099ee93f1eeb9561f0dba97e6295239c8c52cde5c73b7e49 WatchSource:0}: Error finding container 6764909b596f9c5a099ee93f1eeb9561f0dba97e6295239c8c52cde5c73b7e49: Status 404 returned error can't find the container with id 6764909b596f9c5a099ee93f1eeb9561f0dba97e6295239c8c52cde5c73b7e49 Dec 05 11:08:32 crc kubenswrapper[5014]: W1205 11:08:32.425961 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ff8aa8f_72f5_4431_8bc4_758dd55acdcf.slice/crio-ba0f8c2ed110f52ea3dafb8d9ae7819a7e127ad9adcacee65da1f7a32dce8cbe WatchSource:0}: Error finding container ba0f8c2ed110f52ea3dafb8d9ae7819a7e127ad9adcacee65da1f7a32dce8cbe: Status 404 returned error can't find the container with id ba0f8c2ed110f52ea3dafb8d9ae7819a7e127ad9adcacee65da1f7a32dce8cbe Dec 05 11:08:32 crc kubenswrapper[5014]: W1205 11:08:32.441401 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5b07bd8_c674_4647_a09b_eae67ddad491.slice/crio-84382cd01c4cf54c098704a2b361f0abdc97b3fa4f45f5e8c81c1d2800efcd81 WatchSource:0}: Error finding container 84382cd01c4cf54c098704a2b361f0abdc97b3fa4f45f5e8c81c1d2800efcd81: Status 404 returned error can't find the container with id 84382cd01c4cf54c098704a2b361f0abdc97b3fa4f45f5e8c81c1d2800efcd81 Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.773743 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-865b65b5c7-qjg6b" event={"ID":"d7407954-f41a-48ad-8cda-8c165c4fb5b8","Type":"ContainerStarted","Data":"49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52"} Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.774090 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-865b65b5c7-qjg6b" event={"ID":"d7407954-f41a-48ad-8cda-8c165c4fb5b8","Type":"ContainerStarted","Data":"aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52"} Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.774216 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-865b65b5c7-qjg6b" podUID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerName="horizon-log" containerID="cri-o://aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52" 
gracePeriod=30 Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.774802 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-865b65b5c7-qjg6b" podUID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerName="horizon" containerID="cri-o://49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52" gracePeriod=30 Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.776422 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-575d445b9b-l7wlc" event={"ID":"b5b07bd8-c674-4647-a09b-eae67ddad491","Type":"ContainerStarted","Data":"84382cd01c4cf54c098704a2b361f0abdc97b3fa4f45f5e8c81c1d2800efcd81"} Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.779778 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cw4lt" event={"ID":"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf","Type":"ContainerStarted","Data":"ba0f8c2ed110f52ea3dafb8d9ae7819a7e127ad9adcacee65da1f7a32dce8cbe"} Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.782119 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dd6878f44-n5k2l" event={"ID":"b3d0ec93-b994-4bc6-9a86-7085e79c7208","Type":"ContainerStarted","Data":"eb140c30daf7078f9b6d33c7b5180eab8b8fd4acc86994759e77bfca772df80a"} Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.784777 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5zmlx" event={"ID":"ce61806f-b767-42f7-bef7-e11d70d55086","Type":"ContainerStarted","Data":"e974b527a018cf4f07c7fc3f7d0aaff67a71464cf335065fb89189be64c92ab9"} Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.787975 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-757c4b6dbf-492tq" podUID="803d83df-f847-425f-895a-4b1ea26e6868" containerName="horizon-log" containerID="cri-o://68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9" gracePeriod=30 Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.788000 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-757c4b6dbf-492tq" event={"ID":"803d83df-f847-425f-895a-4b1ea26e6868","Type":"ContainerStarted","Data":"6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee"} Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.788053 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-757c4b6dbf-492tq" event={"ID":"803d83df-f847-425f-895a-4b1ea26e6868","Type":"ContainerStarted","Data":"68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9"} Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.788067 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-757c4b6dbf-492tq" podUID="803d83df-f847-425f-895a-4b1ea26e6868" containerName="horizon" containerID="cri-o://6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee" gracePeriod=30 Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.790713 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"70fdbc17-8002-4e0e-82fd-854c962ad0e9","Type":"ContainerStarted","Data":"6764909b596f9c5a099ee93f1eeb9561f0dba97e6295239c8c52cde5c73b7e49"} Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.835502 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-757c4b6dbf-492tq" podStartSLOduration=3.303370321 podStartE2EDuration="31.835484743s" podCreationTimestamp="2025-12-05 11:08:01 +0000 UTC" 
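The pod_startup_latency_tracker record just above makes the relationship between its two durations checkable from the log itself: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (11:08:32.835484743 − 11:08:01 = 31.835484743s), and podStartSLOduration is that total minus the image-pull window (31.835484743s − 28.532114422s = 3.303370321s, where 28.532114422s = lastFinishedPulling − firstStartedPulling). A small verification reusing the timestamps from the record:

```go
package main

import (
	"fmt"
	"time"
)

// mustParse parses the "2025-12-05 11:08:03.130781729 +0000 UTC" form
// used in the log (the monotonic "m=+..." suffix is dropped here).
func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-12-05 11:08:01 +0000 UTC")
	firstPull := mustParse("2025-12-05 11:08:03.130781729 +0000 UTC")
	lastPull := mustParse("2025-12-05 11:08:31.662896151 +0000 UTC")
	running := mustParse("2025-12-05 11:08:32.835484743 +0000 UTC")

	e2e := running.Sub(created)          // 31.835484743s = podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 3.303370321s  = podStartSLOduration
	fmt.Println(e2e, slo)
}
```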
firstStartedPulling="2025-12-05 11:08:03.130781729 +0000 UTC m=+1210.078899433" lastFinishedPulling="2025-12-05 11:08:31.662896151 +0000 UTC m=+1238.611013855" observedRunningTime="2025-12-05 11:08:32.834235292 +0000 UTC m=+1239.782353006" watchObservedRunningTime="2025-12-05 11:08:32.835484743 +0000 UTC m=+1239.783602447" Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.839977 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-865b65b5c7-qjg6b" podStartSLOduration=5.302918459 podStartE2EDuration="34.83995836s" podCreationTimestamp="2025-12-05 11:07:58 +0000 UTC" firstStartedPulling="2025-12-05 11:08:00.709373569 +0000 UTC m=+1207.657491283" lastFinishedPulling="2025-12-05 11:08:30.24641348 +0000 UTC m=+1237.194531184" observedRunningTime="2025-12-05 11:08:32.811445654 +0000 UTC m=+1239.759563388" watchObservedRunningTime="2025-12-05 11:08:32.83995836 +0000 UTC m=+1239.788076064" Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.867428 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-5zmlx" podStartSLOduration=5.939091037 podStartE2EDuration="34.86740436s" podCreationTimestamp="2025-12-05 11:07:58 +0000 UTC" firstStartedPulling="2025-12-05 11:08:00.750865855 +0000 UTC m=+1207.698983559" lastFinishedPulling="2025-12-05 11:08:29.679179178 +0000 UTC m=+1236.627296882" observedRunningTime="2025-12-05 11:08:32.858751102 +0000 UTC m=+1239.806868816" watchObservedRunningTime="2025-12-05 11:08:32.86740436 +0000 UTC m=+1239.815522064" Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.936639 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:08:32 crc kubenswrapper[5014]: I1205 11:08:32.936721 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:08:33 crc kubenswrapper[5014]: I1205 11:08:33.203723 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-zjmn4" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Dec 05 11:08:33 crc kubenswrapper[5014]: I1205 11:08:33.452894 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:08:33 crc kubenswrapper[5014]: I1205 11:08:33.844926 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dd6878f44-n5k2l" event={"ID":"b3d0ec93-b994-4bc6-9a86-7085e79c7208","Type":"ContainerStarted","Data":"e3ef6d861eeac7f4670cd6dade931517836e11444cd2462e01b5fe72fb3766cb"} Dec 05 11:08:33 crc kubenswrapper[5014]: I1205 11:08:33.845245 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dd6878f44-n5k2l" event={"ID":"b3d0ec93-b994-4bc6-9a86-7085e79c7208","Type":"ContainerStarted","Data":"dd1f207c6133c61599fa712c228173fded90676011bb29274b175a31f0d78f1a"} Dec 05 11:08:33 crc kubenswrapper[5014]: I1205 11:08:33.872626 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"70fdbc17-8002-4e0e-82fd-854c962ad0e9","Type":"ContainerStarted","Data":"429f7a030a821d26856d44793146493fa87968783f7ec9e7d2e0f041a29c246f"} Dec 05 11:08:33 crc kubenswrapper[5014]: I1205 11:08:33.890366 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5dd6878f44-n5k2l" podStartSLOduration=23.89034143 podStartE2EDuration="23.89034143s" podCreationTimestamp="2025-12-05 11:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:33.889579052 +0000 UTC m=+1240.837696766" watchObservedRunningTime="2025-12-05 11:08:33.89034143 +0000 UTC m=+1240.838459134" Dec 05 11:08:33 crc kubenswrapper[5014]: I1205 11:08:33.911129 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03429d0b-f6d7-4b47-8dd9-475bf3c88881","Type":"ContainerStarted","Data":"46aaa88e1720862bf3073fdaae83dfb767f99ab32457cf45b16047437b4b6062"} Dec 05 11:08:33 crc kubenswrapper[5014]: I1205 11:08:33.965597 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2nfpj" event={"ID":"7a1f3060-95d4-4b6e-a029-505738f01238","Type":"ContainerStarted","Data":"3d5c72537e2031b13edb5f744d523cbd55b66e44375dae4086a93e06984d5c13"} Dec 05 11:08:33 crc kubenswrapper[5014]: I1205 11:08:33.990955 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ee1ae65-148c-40e6-afc5-8526bade7971","Type":"ContainerStarted","Data":"6f4cfd6805c0bd988a864e370b2ee66fd10a0d6d6ec4b6e7f1d0b54bf2afee27"} Dec 05 11:08:34 crc kubenswrapper[5014]: I1205 11:08:34.016528 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-575d445b9b-l7wlc" event={"ID":"b5b07bd8-c674-4647-a09b-eae67ddad491","Type":"ContainerStarted","Data":"e443a02676f1a3556c037bb3b22244b8623aab1fcfa287fece1ca8603b4861c8"} Dec 05 11:08:34 crc kubenswrapper[5014]: I1205 11:08:34.016592 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-575d445b9b-l7wlc" event={"ID":"b5b07bd8-c674-4647-a09b-eae67ddad491","Type":"ContainerStarted","Data":"d8006b6717d282876f27f625d07392caa9959469c12a03e70372a2915161c9d9"} Dec 05 11:08:34 crc kubenswrapper[5014]: I1205 11:08:34.030476 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cw4lt" event={"ID":"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf","Type":"ContainerStarted","Data":"69d1d8d0afd5c43eff1ba839dd2673eaf592cf90fb23209f5b59be261e2502c9"} Dec 05 11:08:34 crc kubenswrapper[5014]: I1205 11:08:34.143859 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-2nfpj" podStartSLOduration=3.743746617 podStartE2EDuration="36.143835891s" podCreationTimestamp="2025-12-05 11:07:58 +0000 UTC" firstStartedPulling="2025-12-05 11:08:00.611014185 +0000 UTC m=+1207.559131889" lastFinishedPulling="2025-12-05 11:08:33.011103459 +0000 UTC m=+1239.959221163" observedRunningTime="2025-12-05 11:08:34.075786203 +0000 UTC m=+1241.023903917" watchObservedRunningTime="2025-12-05 11:08:34.143835891 +0000 UTC m=+1241.091953605" Dec 05 11:08:34 crc kubenswrapper[5014]: I1205 11:08:34.148236 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-cw4lt" podStartSLOduration=19.148216706 podStartE2EDuration="19.148216706s" podCreationTimestamp="2025-12-05 11:08:15 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:34.124919756 +0000 UTC m=+1241.073037480" watchObservedRunningTime="2025-12-05 11:08:34.148216706 +0000 UTC m=+1241.096334410" Dec 05 11:08:34 crc kubenswrapper[5014]: I1205 11:08:34.188678 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-575d445b9b-l7wlc" podStartSLOduration=24.188644489 podStartE2EDuration="24.188644489s" podCreationTimestamp="2025-12-05 11:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:34.171454245 +0000 UTC m=+1241.119571959" watchObservedRunningTime="2025-12-05 11:08:34.188644489 +0000 UTC m=+1241.136762193" Dec 05 11:08:35 crc kubenswrapper[5014]: I1205 11:08:35.049015 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ee1ae65-148c-40e6-afc5-8526bade7971","Type":"ContainerStarted","Data":"99b6d98c73190b35e4f1923ab1019f752510f57bd4b744606254ee60f1969619"} Dec 05 11:08:35 crc kubenswrapper[5014]: I1205 11:08:35.059985 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" containerName="glance-log" containerID="cri-o://429f7a030a821d26856d44793146493fa87968783f7ec9e7d2e0f041a29c246f" gracePeriod=30 Dec 05 11:08:35 crc kubenswrapper[5014]: I1205 11:08:35.060229 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" containerName="glance-httpd" containerID="cri-o://c7144188298255494f0c9ddb6790209e73805a505ad102e9fc8d9b987508d991" gracePeriod=30 Dec 05 11:08:35 crc kubenswrapper[5014]: I1205 11:08:35.060362 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"70fdbc17-8002-4e0e-82fd-854c962ad0e9","Type":"ContainerStarted","Data":"c7144188298255494f0c9ddb6790209e73805a505ad102e9fc8d9b987508d991"} Dec 05 11:08:35 crc kubenswrapper[5014]: I1205 11:08:35.106132 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=27.104256505 podStartE2EDuration="27.104256505s" podCreationTimestamp="2025-12-05 11:08:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:35.088046346 +0000 UTC m=+1242.036164060" watchObservedRunningTime="2025-12-05 11:08:35.104256505 +0000 UTC m=+1242.052374209" Dec 05 11:08:36 crc kubenswrapper[5014]: I1205 11:08:36.079426 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ee1ae65-148c-40e6-afc5-8526bade7971","Type":"ContainerStarted","Data":"c5a5f72ca8e40c159108bdc35c106f726ac999ab72cc675407f910dc256e5823"} Dec 05 11:08:36 crc kubenswrapper[5014]: I1205 11:08:36.084190 5014 generic.go:334] "Generic (PLEG): container finished" podID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" containerID="c7144188298255494f0c9ddb6790209e73805a505ad102e9fc8d9b987508d991" exitCode=0 Dec 05 11:08:36 crc kubenswrapper[5014]: I1205 11:08:36.084232 5014 generic.go:334] "Generic (PLEG): container finished" podID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" 
containerID="429f7a030a821d26856d44793146493fa87968783f7ec9e7d2e0f041a29c246f" exitCode=143 Dec 05 11:08:36 crc kubenswrapper[5014]: I1205 11:08:36.084326 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"70fdbc17-8002-4e0e-82fd-854c962ad0e9","Type":"ContainerDied","Data":"c7144188298255494f0c9ddb6790209e73805a505ad102e9fc8d9b987508d991"} Dec 05 11:08:36 crc kubenswrapper[5014]: I1205 11:08:36.084395 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"70fdbc17-8002-4e0e-82fd-854c962ad0e9","Type":"ContainerDied","Data":"429f7a030a821d26856d44793146493fa87968783f7ec9e7d2e0f041a29c246f"} Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.694541 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.778246 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-logs\") pod \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.778421 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-httpd-run\") pod \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.778472 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-scripts\") pod \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.778530 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.778588 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knvpr\" (UniqueName: \"kubernetes.io/projected/70fdbc17-8002-4e0e-82fd-854c962ad0e9-kube-api-access-knvpr\") pod \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.778641 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-combined-ca-bundle\") pod \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.778827 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-logs" (OuterVolumeSpecName: "logs") pod "70fdbc17-8002-4e0e-82fd-854c962ad0e9" (UID: "70fdbc17-8002-4e0e-82fd-854c962ad0e9"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.779296 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "70fdbc17-8002-4e0e-82fd-854c962ad0e9" (UID: "70fdbc17-8002-4e0e-82fd-854c962ad0e9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.779699 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-public-tls-certs\") pod \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.780236 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-config-data\") pod \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\" (UID: \"70fdbc17-8002-4e0e-82fd-854c962ad0e9\") " Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.781029 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.781055 5014 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70fdbc17-8002-4e0e-82fd-854c962ad0e9-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.785474 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70fdbc17-8002-4e0e-82fd-854c962ad0e9-kube-api-access-knvpr" (OuterVolumeSpecName: "kube-api-access-knvpr") pod "70fdbc17-8002-4e0e-82fd-854c962ad0e9" (UID: "70fdbc17-8002-4e0e-82fd-854c962ad0e9"). InnerVolumeSpecName "kube-api-access-knvpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.786459 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "70fdbc17-8002-4e0e-82fd-854c962ad0e9" (UID: "70fdbc17-8002-4e0e-82fd-854c962ad0e9"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.798107 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-scripts" (OuterVolumeSpecName: "scripts") pod "70fdbc17-8002-4e0e-82fd-854c962ad0e9" (UID: "70fdbc17-8002-4e0e-82fd-854c962ad0e9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.814724 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70fdbc17-8002-4e0e-82fd-854c962ad0e9" (UID: "70fdbc17-8002-4e0e-82fd-854c962ad0e9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.833543 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "70fdbc17-8002-4e0e-82fd-854c962ad0e9" (UID: "70fdbc17-8002-4e0e-82fd-854c962ad0e9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.849123 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-config-data" (OuterVolumeSpecName: "config-data") pod "70fdbc17-8002-4e0e-82fd-854c962ad0e9" (UID: "70fdbc17-8002-4e0e-82fd-854c962ad0e9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.882285 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.882372 5014 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.882386 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knvpr\" (UniqueName: \"kubernetes.io/projected/70fdbc17-8002-4e0e-82fd-854c962ad0e9-kube-api-access-knvpr\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.882399 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.882410 5014 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.882422 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70fdbc17-8002-4e0e-82fd-854c962ad0e9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.916958 5014 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 05 11:08:37 crc kubenswrapper[5014]: I1205 11:08:37.984433 5014 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.107826 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"70fdbc17-8002-4e0e-82fd-854c962ad0e9","Type":"ContainerDied","Data":"6764909b596f9c5a099ee93f1eeb9561f0dba97e6295239c8c52cde5c73b7e49"} Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.108694 5014 scope.go:117] "RemoveContainer" containerID="c7144188298255494f0c9ddb6790209e73805a505ad102e9fc8d9b987508d991" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.108839 5014 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.152160 5014 scope.go:117] "RemoveContainer" containerID="429f7a030a821d26856d44793146493fa87968783f7ec9e7d2e0f041a29c246f" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.159141 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.171716 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.190914 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:38 crc kubenswrapper[5014]: E1205 11:08:38.192672 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerName="dnsmasq-dns" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.192727 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerName="dnsmasq-dns" Dec 05 11:08:38 crc kubenswrapper[5014]: E1205 11:08:38.192788 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" containerName="glance-httpd" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.192801 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" containerName="glance-httpd" Dec 05 11:08:38 crc kubenswrapper[5014]: E1205 11:08:38.192825 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerName="init" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.192833 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerName="init" Dec 05 11:08:38 crc kubenswrapper[5014]: E1205 11:08:38.192857 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" containerName="glance-log" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.192866 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" containerName="glance-log" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.193228 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eb8cd04-c5a2-4a43-8648-80e74478ec75" containerName="dnsmasq-dns" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.193258 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" containerName="glance-httpd" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.193300 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" containerName="glance-log" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.194675 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.204317 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.205500 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.211236 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.390773 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.390827 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.390857 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-config-data\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.390888 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-logs\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.390930 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-scripts\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.391212 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.391258 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.391301 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-jwqxx\" (UniqueName: \"kubernetes.io/projected/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-kube-api-access-jwqxx\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.493329 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-scripts\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.493653 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.493890 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.494037 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwqxx\" (UniqueName: \"kubernetes.io/projected/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-kube-api-access-jwqxx\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.494222 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.494399 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.494515 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.494619 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-config-data\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.494745 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-logs\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.494543 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.495086 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-logs\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.500028 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.500527 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-scripts\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.503381 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-config-data\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.513687 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.514673 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwqxx\" (UniqueName: \"kubernetes.io/projected/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-kube-api-access-jwqxx\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.536140 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") " pod="openstack/glance-default-external-api-0" Dec 05 11:08:38 crc kubenswrapper[5014]: I1205 11:08:38.821570 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 11:08:39 crc kubenswrapper[5014]: I1205 11:08:39.150235 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=24.150217711 podStartE2EDuration="24.150217711s" podCreationTimestamp="2025-12-05 11:08:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:39.15015409 +0000 UTC m=+1246.098271814" watchObservedRunningTime="2025-12-05 11:08:39.150217711 +0000 UTC m=+1246.098335415" Dec 05 11:08:39 crc kubenswrapper[5014]: I1205 11:08:39.332201 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70fdbc17-8002-4e0e-82fd-854c962ad0e9" path="/var/lib/kubelet/pods/70fdbc17-8002-4e0e-82fd-854c962ad0e9/volumes" Dec 05 11:08:39 crc kubenswrapper[5014]: I1205 11:08:39.465778 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:08:39 crc kubenswrapper[5014]: W1205 11:08:39.469797 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bbdbdf1_2a4b_43da_bb88_a5e6d3cbd0b4.slice/crio-2add85ce5d9f676f9989d25d667eff660eb4e53dc7d7f80a9ef0be3a0d40f6aa WatchSource:0}: Error finding container 2add85ce5d9f676f9989d25d667eff660eb4e53dc7d7f80a9ef0be3a0d40f6aa: Status 404 returned error can't find the container with id 2add85ce5d9f676f9989d25d667eff660eb4e53dc7d7f80a9ef0be3a0d40f6aa Dec 05 11:08:39 crc kubenswrapper[5014]: I1205 11:08:39.529381 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:08:40 crc kubenswrapper[5014]: I1205 11:08:40.151016 5014 generic.go:334] "Generic (PLEG): container finished" podID="ce61806f-b767-42f7-bef7-e11d70d55086" containerID="e974b527a018cf4f07c7fc3f7d0aaff67a71464cf335065fb89189be64c92ab9" exitCode=0 Dec 05 11:08:40 crc kubenswrapper[5014]: I1205 11:08:40.151523 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5zmlx" event={"ID":"ce61806f-b767-42f7-bef7-e11d70d55086","Type":"ContainerDied","Data":"e974b527a018cf4f07c7fc3f7d0aaff67a71464cf335065fb89189be64c92ab9"} Dec 05 11:08:40 crc kubenswrapper[5014]: I1205 11:08:40.154626 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4","Type":"ContainerStarted","Data":"2add85ce5d9f676f9989d25d667eff660eb4e53dc7d7f80a9ef0be3a0d40f6aa"} Dec 05 11:08:40 crc kubenswrapper[5014]: I1205 11:08:40.631183 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:40 crc kubenswrapper[5014]: I1205 11:08:40.632845 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:08:40 crc kubenswrapper[5014]: I1205 11:08:40.777757 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:40 crc kubenswrapper[5014]: I1205 11:08:40.777856 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:08:41 crc kubenswrapper[5014]: I1205 11:08:41.193017 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
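The pod_startup_latency_tracker entry above reports podStartSLOduration=24.150217711 for glance-default-internal-api-0. That figure is simply watchObservedRunningTime minus podCreationTimestamp; a small Go check (both timestamps copied verbatim from the log line) reproduces it:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the "2025-12-05 11:08:15 +0000 UTC" form printed above.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2025-12-05 11:08:15 +0000 UTC")
	observed, _ := time.Parse(layout, "2025-12-05 11:08:39.150217711 +0000 UTC")
	fmt.Println(observed.Sub(created).Seconds()) // 24.150217711, matching podStartSLOduration
}

Note that firstStartedPulling and lastFinishedPulling are the zero time (0001-01-01), meaning no image pull contributed to the 24 s; the whole duration is scheduling plus container start.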
event={"ID":"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4","Type":"ContainerStarted","Data":"460ea910d7875fd4c009eabf1d4fbd9c9a8cd150dd3200f2a269f58655537bab"} Dec 05 11:08:41 crc kubenswrapper[5014]: I1205 11:08:41.194607 5014 generic.go:334] "Generic (PLEG): container finished" podID="3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" containerID="69d1d8d0afd5c43eff1ba839dd2673eaf592cf90fb23209f5b59be261e2502c9" exitCode=0 Dec 05 11:08:41 crc kubenswrapper[5014]: I1205 11:08:41.195034 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cw4lt" event={"ID":"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf","Type":"ContainerDied","Data":"69d1d8d0afd5c43eff1ba839dd2673eaf592cf90fb23209f5b59be261e2502c9"} Dec 05 11:08:42 crc kubenswrapper[5014]: I1205 11:08:42.383341 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.607412 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.652995 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5zmlx" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.748062 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-combined-ca-bundle\") pod \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.748548 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67wrd\" (UniqueName: \"kubernetes.io/projected/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-kube-api-access-67wrd\") pod \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.748610 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-config-data\") pod \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.748659 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-fernet-keys\") pod \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.748717 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-credential-keys\") pod \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.748741 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-scripts\") pod \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\" (UID: \"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.754653 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" (UID: "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.760902 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-kube-api-access-67wrd" (OuterVolumeSpecName: "kube-api-access-67wrd") pod "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" (UID: "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf"). InnerVolumeSpecName "kube-api-access-67wrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.771441 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-scripts" (OuterVolumeSpecName: "scripts") pod "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" (UID: "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.785451 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" (UID: "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.808428 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" (UID: "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.832769 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-config-data" (OuterVolumeSpecName: "config-data") pod "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" (UID: "3ff8aa8f-72f5-4431-8bc4-758dd55acdcf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.853097 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5w55\" (UniqueName: \"kubernetes.io/projected/ce61806f-b767-42f7-bef7-e11d70d55086-kube-api-access-c5w55\") pod \"ce61806f-b767-42f7-bef7-e11d70d55086\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.853602 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-scripts\") pod \"ce61806f-b767-42f7-bef7-e11d70d55086\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.853694 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce61806f-b767-42f7-bef7-e11d70d55086-logs\") pod \"ce61806f-b767-42f7-bef7-e11d70d55086\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.853883 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-config-data\") pod \"ce61806f-b767-42f7-bef7-e11d70d55086\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.854110 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-combined-ca-bundle\") pod \"ce61806f-b767-42f7-bef7-e11d70d55086\" (UID: \"ce61806f-b767-42f7-bef7-e11d70d55086\") " Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.854670 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.854766 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67wrd\" (UniqueName: \"kubernetes.io/projected/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-kube-api-access-67wrd\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.854833 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.855058 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce61806f-b767-42f7-bef7-e11d70d55086-logs" (OuterVolumeSpecName: "logs") pod "ce61806f-b767-42f7-bef7-e11d70d55086" (UID: "ce61806f-b767-42f7-bef7-e11d70d55086"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.858425 5014 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.858476 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.858488 5014 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.861888 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce61806f-b767-42f7-bef7-e11d70d55086-kube-api-access-c5w55" (OuterVolumeSpecName: "kube-api-access-c5w55") pod "ce61806f-b767-42f7-bef7-e11d70d55086" (UID: "ce61806f-b767-42f7-bef7-e11d70d55086"). InnerVolumeSpecName "kube-api-access-c5w55". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.872675 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-scripts" (OuterVolumeSpecName: "scripts") pod "ce61806f-b767-42f7-bef7-e11d70d55086" (UID: "ce61806f-b767-42f7-bef7-e11d70d55086"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.895922 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-config-data" (OuterVolumeSpecName: "config-data") pod "ce61806f-b767-42f7-bef7-e11d70d55086" (UID: "ce61806f-b767-42f7-bef7-e11d70d55086"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.902511 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce61806f-b767-42f7-bef7-e11d70d55086" (UID: "ce61806f-b767-42f7-bef7-e11d70d55086"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.961219 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.961561 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce61806f-b767-42f7-bef7-e11d70d55086-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.961572 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.961583 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce61806f-b767-42f7-bef7-e11d70d55086-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:43 crc kubenswrapper[5014]: I1205 11:08:43.961594 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5w55\" (UniqueName: \"kubernetes.io/projected/ce61806f-b767-42f7-bef7-e11d70d55086-kube-api-access-c5w55\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.224020 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5zmlx" event={"ID":"ce61806f-b767-42f7-bef7-e11d70d55086","Type":"ContainerDied","Data":"52854b55d129669789029745b4c62b77fbeb4176574d07139b625198f98bccd5"} Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.224060 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52854b55d129669789029745b4c62b77fbeb4176574d07139b625198f98bccd5" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.224106 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5zmlx" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.226739 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03429d0b-f6d7-4b47-8dd9-475bf3c88881","Type":"ContainerStarted","Data":"bb5125794779a1e980da5772450649d5158cbe00df3d1bfea79e7f1f807bb4da"} Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.229150 5014 generic.go:334] "Generic (PLEG): container finished" podID="7a1f3060-95d4-4b6e-a029-505738f01238" containerID="3d5c72537e2031b13edb5f744d523cbd55b66e44375dae4086a93e06984d5c13" exitCode=0 Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.229191 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2nfpj" event={"ID":"7a1f3060-95d4-4b6e-a029-505738f01238","Type":"ContainerDied","Data":"3d5c72537e2031b13edb5f744d523cbd55b66e44375dae4086a93e06984d5c13"} Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.232661 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cw4lt" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.232683 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cw4lt" event={"ID":"3ff8aa8f-72f5-4431-8bc4-758dd55acdcf","Type":"ContainerDied","Data":"ba0f8c2ed110f52ea3dafb8d9ae7819a7e127ad9adcacee65da1f7a32dce8cbe"} Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.232728 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba0f8c2ed110f52ea3dafb8d9ae7819a7e127ad9adcacee65da1f7a32dce8cbe" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.771675 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c6c5974d5-l72zk"] Dec 05 11:08:44 crc kubenswrapper[5014]: E1205 11:08:44.772090 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" containerName="keystone-bootstrap" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.772102 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" containerName="keystone-bootstrap" Dec 05 11:08:44 crc kubenswrapper[5014]: E1205 11:08:44.772117 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce61806f-b767-42f7-bef7-e11d70d55086" containerName="placement-db-sync" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.772122 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce61806f-b767-42f7-bef7-e11d70d55086" containerName="placement-db-sync" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.778653 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce61806f-b767-42f7-bef7-e11d70d55086" containerName="placement-db-sync" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.778704 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" containerName="keystone-bootstrap" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.780028 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.784936 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.785059 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s452q" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.785106 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.785248 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.785369 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.786152 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.790533 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-public-tls-certs\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.790601 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxr6h\" (UniqueName: \"kubernetes.io/projected/d9c4da24-4b94-4a9f-982f-9114df83cc67-kube-api-access-nxr6h\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.790624 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-fernet-keys\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.790689 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-credential-keys\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.790746 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-scripts\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.790768 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-combined-ca-bundle\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.790781 5014 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-internal-tls-certs\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.790831 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-config-data\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.798164 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c6c5974d5-l72zk"] Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.893849 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-credential-keys\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.893939 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-scripts\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.893963 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-combined-ca-bundle\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.893981 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-internal-tls-certs\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.894032 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-config-data\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.894080 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-public-tls-certs\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.894124 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxr6h\" (UniqueName: \"kubernetes.io/projected/d9c4da24-4b94-4a9f-982f-9114df83cc67-kube-api-access-nxr6h\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc 
kubenswrapper[5014]: I1205 11:08:44.894142 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-fernet-keys\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.905511 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-config-data\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.909416 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-75c6d4746d-f9vpc"] Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.910891 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.911887 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-internal-tls-certs\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.912314 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-fernet-keys\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.912566 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-combined-ca-bundle\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.915924 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.916149 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.916333 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.917055 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.919476 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-public-tls-certs\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.921822 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-scripts\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " 
pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.930515 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ckqjk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.930766 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d9c4da24-4b94-4a9f-982f-9114df83cc67-credential-keys\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.931078 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxr6h\" (UniqueName: \"kubernetes.io/projected/d9c4da24-4b94-4a9f-982f-9114df83cc67-kube-api-access-nxr6h\") pod \"keystone-c6c5974d5-l72zk\" (UID: \"d9c4da24-4b94-4a9f-982f-9114df83cc67\") " pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:44 crc kubenswrapper[5014]: I1205 11:08:44.946945 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75c6d4746d-f9vpc"] Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.004793 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81877d92-8552-4149-a92a-9a9bdfc431b4-logs\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.005013 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-scripts\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.005072 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-combined-ca-bundle\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.005147 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-public-tls-certs\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.005237 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llwxr\" (UniqueName: \"kubernetes.io/projected/81877d92-8552-4149-a92a-9a9bdfc431b4-kube-api-access-llwxr\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.005294 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-config-data\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.005312 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-internal-tls-certs\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.107031 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81877d92-8552-4149-a92a-9a9bdfc431b4-logs\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.107186 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-scripts\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.107227 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-combined-ca-bundle\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.107318 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-public-tls-certs\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.107400 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llwxr\" (UniqueName: \"kubernetes.io/projected/81877d92-8552-4149-a92a-9a9bdfc431b4-kube-api-access-llwxr\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.107453 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-config-data\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.107475 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-internal-tls-certs\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.108563 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81877d92-8552-4149-a92a-9a9bdfc431b4-logs\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.113663 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-combined-ca-bundle\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.114292 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-internal-tls-certs\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.120263 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-scripts\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.123237 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-public-tls-certs\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.128532 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c6c5974d5-l72zk"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.134173 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81877d92-8552-4149-a92a-9a9bdfc431b4-config-data\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.141716 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llwxr\" (UniqueName: \"kubernetes.io/projected/81877d92-8552-4149-a92a-9a9bdfc431b4-kube-api-access-llwxr\") pod \"placement-75c6d4746d-f9vpc\" (UID: \"81877d92-8552-4149-a92a-9a9bdfc431b4\") " pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.244263 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4","Type":"ContainerStarted","Data":"fcd92f4a8b32ab8955ca1a0b376a1e41627729516993fbfeff7770a5ca35d1f8"}
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.285876 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.28585609 podStartE2EDuration="7.28585609s" podCreationTimestamp="2025-12-05 11:08:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:45.280669064 +0000 UTC m=+1252.228786788" watchObservedRunningTime="2025-12-05 11:08:45.28585609 +0000 UTC m=+1252.233973794"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.387691 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-75c6d4746d-f9vpc"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.775810 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2nfpj"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.944635 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-combined-ca-bundle\") pod \"7a1f3060-95d4-4b6e-a029-505738f01238\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") "
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.945006 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-db-sync-config-data\") pod \"7a1f3060-95d4-4b6e-a029-505738f01238\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") "
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.945188 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hl5zm\" (UniqueName: \"kubernetes.io/projected/7a1f3060-95d4-4b6e-a029-505738f01238-kube-api-access-hl5zm\") pod \"7a1f3060-95d4-4b6e-a029-505738f01238\" (UID: \"7a1f3060-95d4-4b6e-a029-505738f01238\") "
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.953081 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a1f3060-95d4-4b6e-a029-505738f01238-kube-api-access-hl5zm" (OuterVolumeSpecName: "kube-api-access-hl5zm") pod "7a1f3060-95d4-4b6e-a029-505738f01238" (UID: "7a1f3060-95d4-4b6e-a029-505738f01238"). InnerVolumeSpecName "kube-api-access-hl5zm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.967081 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7a1f3060-95d4-4b6e-a029-505738f01238" (UID: "7a1f3060-95d4-4b6e-a029-505738f01238"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.983382 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.983433 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.983445 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 05 11:08:45 crc kubenswrapper[5014]: I1205 11:08:45.983458 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.030417 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.035906 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.062985 5014 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.063345 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hl5zm\" (UniqueName: \"kubernetes.io/projected/7a1f3060-95d4-4b6e-a029-505738f01238-kube-api-access-hl5zm\") on node \"crc\" DevicePath \"\""
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.084527 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a1f3060-95d4-4b6e-a029-505738f01238" (UID: "7a1f3060-95d4-4b6e-a029-505738f01238"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.135502 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c6c5974d5-l72zk"]
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.165048 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a1f3060-95d4-4b6e-a029-505738f01238-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.210905 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75c6d4746d-f9vpc"]
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.266573 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c6c5974d5-l72zk" event={"ID":"d9c4da24-4b94-4a9f-982f-9114df83cc67","Type":"ContainerStarted","Data":"08beae5d65d6ddc2dfdaf9de303f67bcc670370f8bd3042827a4c531a90aa51b"}
Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.274025 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2nfpj"
Need to start a new one" pod="openstack/barbican-db-sync-2nfpj" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.275097 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2nfpj" event={"ID":"7a1f3060-95d4-4b6e-a029-505738f01238","Type":"ContainerDied","Data":"78cca313b32b18450a8c494326deed6065f2f31eff35c60de225b48cd8e1d371"} Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.275141 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78cca313b32b18450a8c494326deed6065f2f31eff35c60de225b48cd8e1d371" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.434435 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-78b5c6757c-hdtxh"] Dec 05 11:08:46 crc kubenswrapper[5014]: E1205 11:08:46.435221 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a1f3060-95d4-4b6e-a029-505738f01238" containerName="barbican-db-sync" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.435240 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a1f3060-95d4-4b6e-a029-505738f01238" containerName="barbican-db-sync" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.435459 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a1f3060-95d4-4b6e-a029-505738f01238" containerName="barbican-db-sync" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.436591 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.442228 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.442574 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.447235 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-kj5jm" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.453454 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-78b5c6757c-hdtxh"] Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.466055 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-c76ffd784-m8mzt"] Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.489183 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-c76ffd784-m8mzt"] Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.489295 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.496342 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.554645 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-krrbp"] Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.562346 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636222 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7a5e0260-dfe5-4f24-82bc-e172af4db809-config-data-custom\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636349 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wndd\" (UniqueName: \"kubernetes.io/projected/9cfdc764-b85e-48e8-8a0e-0945c00f278f-kube-api-access-7wndd\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636419 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cfdc764-b85e-48e8-8a0e-0945c00f278f-combined-ca-bundle\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636482 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cfdc764-b85e-48e8-8a0e-0945c00f278f-config-data\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636542 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a5e0260-dfe5-4f24-82bc-e172af4db809-config-data\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636642 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9cfdc764-b85e-48e8-8a0e-0945c00f278f-config-data-custom\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636697 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a5e0260-dfe5-4f24-82bc-e172af4db809-logs\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636721 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5e0260-dfe5-4f24-82bc-e172af4db809-combined-ca-bundle\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636754 5014 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9cfdc764-b85e-48e8-8a0e-0945c00f278f-logs\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.636789 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42p5q\" (UniqueName: \"kubernetes.io/projected/7a5e0260-dfe5-4f24-82bc-e172af4db809-kube-api-access-42p5q\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.651643 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-krrbp"] Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.730989 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-777967dc98-c927p"] Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.735715 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739611 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wndd\" (UniqueName: \"kubernetes.io/projected/9cfdc764-b85e-48e8-8a0e-0945c00f278f-kube-api-access-7wndd\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739668 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-svc\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739717 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cfdc764-b85e-48e8-8a0e-0945c00f278f-combined-ca-bundle\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739755 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc5fh\" (UniqueName: \"kubernetes.io/projected/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-kube-api-access-gc5fh\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739784 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cfdc764-b85e-48e8-8a0e-0945c00f278f-config-data\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739823 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7a5e0260-dfe5-4f24-82bc-e172af4db809-config-data\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739846 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-nb\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739879 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-config\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739931 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9cfdc764-b85e-48e8-8a0e-0945c00f278f-config-data-custom\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.739963 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-sb\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.740002 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a5e0260-dfe5-4f24-82bc-e172af4db809-logs\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.740024 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5e0260-dfe5-4f24-82bc-e172af4db809-combined-ca-bundle\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.740048 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9cfdc764-b85e-48e8-8a0e-0945c00f278f-logs\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.740075 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42p5q\" (UniqueName: \"kubernetes.io/projected/7a5e0260-dfe5-4f24-82bc-e172af4db809-kube-api-access-42p5q\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.740140 5014 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7a5e0260-dfe5-4f24-82bc-e172af4db809-config-data-custom\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.740144 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.740183 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-swift-storage-0\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.750702 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9cfdc764-b85e-48e8-8a0e-0945c00f278f-logs\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.752802 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a5e0260-dfe5-4f24-82bc-e172af4db809-logs\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.765206 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a5e0260-dfe5-4f24-82bc-e172af4db809-combined-ca-bundle\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.776897 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9cfdc764-b85e-48e8-8a0e-0945c00f278f-config-data-custom\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.776960 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a5e0260-dfe5-4f24-82bc-e172af4db809-config-data\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.780089 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cfdc764-b85e-48e8-8a0e-0945c00f278f-combined-ca-bundle\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.780604 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wndd\" (UniqueName: \"kubernetes.io/projected/9cfdc764-b85e-48e8-8a0e-0945c00f278f-kube-api-access-7wndd\") pod 
\"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.782992 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7a5e0260-dfe5-4f24-82bc-e172af4db809-config-data-custom\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.786123 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-777967dc98-c927p"] Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.788558 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cfdc764-b85e-48e8-8a0e-0945c00f278f-config-data\") pod \"barbican-keystone-listener-c76ffd784-m8mzt\" (UID: \"9cfdc764-b85e-48e8-8a0e-0945c00f278f\") " pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.808086 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42p5q\" (UniqueName: \"kubernetes.io/projected/7a5e0260-dfe5-4f24-82bc-e172af4db809-kube-api-access-42p5q\") pod \"barbican-worker-78b5c6757c-hdtxh\" (UID: \"7a5e0260-dfe5-4f24-82bc-e172af4db809\") " pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.840643 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.841889 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-config\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.841934 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-sb\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.841975 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data-custom\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.842006 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ed7afe-3f44-4214-b156-0404222f92a8-logs\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.842028 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-combined-ca-bundle\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.842055 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-swift-storage-0\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.842075 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-svc\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.842099 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zsxr\" (UniqueName: \"kubernetes.io/projected/e2ed7afe-3f44-4214-b156-0404222f92a8-kube-api-access-4zsxr\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.842138 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.842158 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc5fh\" (UniqueName: \"kubernetes.io/projected/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-kube-api-access-gc5fh\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.842232 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-nb\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.843043 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-nb\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.843643 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-config\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.844226 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-sb\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.844774 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-swift-storage-0\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.845378 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-svc\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.866400 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc5fh\" (UniqueName: \"kubernetes.io/projected/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-kube-api-access-gc5fh\") pod \"dnsmasq-dns-6d66f584d7-krrbp\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.944370 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data-custom\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.944423 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ed7afe-3f44-4214-b156-0404222f92a8-logs\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.944473 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-combined-ca-bundle\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.944535 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zsxr\" (UniqueName: \"kubernetes.io/projected/e2ed7afe-3f44-4214-b156-0404222f92a8-kube-api-access-4zsxr\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.944569 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.945764 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ed7afe-3f44-4214-b156-0404222f92a8-logs\") pod 
\"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.949803 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data-custom\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.951312 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.967347 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-combined-ca-bundle\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.972009 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zsxr\" (UniqueName: \"kubernetes.io/projected/e2ed7afe-3f44-4214-b156-0404222f92a8-kube-api-access-4zsxr\") pod \"barbican-api-777967dc98-c927p\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.986627 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:46 crc kubenswrapper[5014]: I1205 11:08:46.990562 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.088755 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-78b5c6757c-hdtxh" Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.408927 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75c6d4746d-f9vpc" event={"ID":"81877d92-8552-4149-a92a-9a9bdfc431b4","Type":"ContainerStarted","Data":"44244d68448278a4158e06107a4d97888f87cab40a43eec34f0d994ee0afa621"} Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.409223 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75c6d4746d-f9vpc" event={"ID":"81877d92-8552-4149-a92a-9a9bdfc431b4","Type":"ContainerStarted","Data":"c85977d85c5c9f74eab94af34dc491d429155fb72aca6456f048611af366ae12"} Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.412306 5014 generic.go:334] "Generic (PLEG): container finished" podID="eb060c27-a3ff-4233-9c8f-a5614f4ef60b" containerID="f8f46c752ae3707151ef0059d630531aead2e7fdc5de832aeb5656ddaf24ebe4" exitCode=0 Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.412355 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-fnq4z" event={"ID":"eb060c27-a3ff-4233-9c8f-a5614f4ef60b","Type":"ContainerDied","Data":"f8f46c752ae3707151ef0059d630531aead2e7fdc5de832aeb5656ddaf24ebe4"} Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.419524 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c6c5974d5-l72zk" event={"ID":"d9c4da24-4b94-4a9f-982f-9114df83cc67","Type":"ContainerStarted","Data":"c300cac1c8e685d9f1d3337470c40602d6827cc18bb4b2db4084422fe7f61684"} Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.475221 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-c6c5974d5-l72zk" podStartSLOduration=3.475200491 podStartE2EDuration="3.475200491s" podCreationTimestamp="2025-12-05 11:08:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:47.46309979 +0000 UTC m=+1254.411217494" watchObservedRunningTime="2025-12-05 11:08:47.475200491 +0000 UTC m=+1254.423318195" Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.540824 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-c76ffd784-m8mzt"] Dec 05 11:08:47 crc kubenswrapper[5014]: W1205 11:08:47.587095 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9cfdc764_b85e_48e8_8a0e_0945c00f278f.slice/crio-6183885188a514aff9875183bf2ebc8a608c26a487dd9e50d38887f80658faf0 WatchSource:0}: Error finding container 6183885188a514aff9875183bf2ebc8a608c26a487dd9e50d38887f80658faf0: Status 404 returned error can't find the container with id 6183885188a514aff9875183bf2ebc8a608c26a487dd9e50d38887f80658faf0 Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.689397 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-krrbp"] Dec 05 11:08:47 crc kubenswrapper[5014]: W1205 11:08:47.744690 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0269d6fe_d6ee_4840_8ae2_cde3db4a989b.slice/crio-cfe3dbe88a69a20832b284895cdc94e0deb847895c2fba121f7d1f760b6ddc8a WatchSource:0}: Error finding container cfe3dbe88a69a20832b284895cdc94e0deb847895c2fba121f7d1f760b6ddc8a: Status 404 returned error can't find the container with id 
cfe3dbe88a69a20832b284895cdc94e0deb847895c2fba121f7d1f760b6ddc8a Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.878350 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-777967dc98-c927p"] Dec 05 11:08:47 crc kubenswrapper[5014]: I1205 11:08:47.890130 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-78b5c6757c-hdtxh"] Dec 05 11:08:47 crc kubenswrapper[5014]: W1205 11:08:47.900093 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a5e0260_dfe5_4f24_82bc_e172af4db809.slice/crio-29ffbdf41d66001d9e772922a6e3f04b023bd21d4b907f8449a94580e14b3b15 WatchSource:0}: Error finding container 29ffbdf41d66001d9e772922a6e3f04b023bd21d4b907f8449a94580e14b3b15: Status 404 returned error can't find the container with id 29ffbdf41d66001d9e772922a6e3f04b023bd21d4b907f8449a94580e14b3b15 Dec 05 11:08:47 crc kubenswrapper[5014]: W1205 11:08:47.935031 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2ed7afe_3f44_4214_b156_0404222f92a8.slice/crio-e0e050d3af172c2b5bace84fbd1b31f6aad64fdb52b175a0a27f23e9dca27ca7 WatchSource:0}: Error finding container e0e050d3af172c2b5bace84fbd1b31f6aad64fdb52b175a0a27f23e9dca27ca7: Status 404 returned error can't find the container with id e0e050d3af172c2b5bace84fbd1b31f6aad64fdb52b175a0a27f23e9dca27ca7 Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.453327 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-zshwt" event={"ID":"36756ede-ab38-444f-8f4a-a07da8173882","Type":"ContainerStarted","Data":"374859f6acd833eec3d01eb214ae18360f03c0bb616482cedb65d51ec5502169"} Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.462544 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75c6d4746d-f9vpc" event={"ID":"81877d92-8552-4149-a92a-9a9bdfc431b4","Type":"ContainerStarted","Data":"e2fb0079d7df1ae14851a77aa7a52bef99223762e2c55d197826fe86fed480e0"} Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.463388 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.463516 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.470610 5014 generic.go:334] "Generic (PLEG): container finished" podID="0269d6fe-d6ee-4840-8ae2-cde3db4a989b" containerID="f141cba59289beaf1b5a6ab1951d9dbd6333a8520fa64b3803f98740b818fc0c" exitCode=0 Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.470683 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" event={"ID":"0269d6fe-d6ee-4840-8ae2-cde3db4a989b","Type":"ContainerDied","Data":"f141cba59289beaf1b5a6ab1951d9dbd6333a8520fa64b3803f98740b818fc0c"} Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.470708 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" event={"ID":"0269d6fe-d6ee-4840-8ae2-cde3db4a989b","Type":"ContainerStarted","Data":"cfe3dbe88a69a20832b284895cdc94e0deb847895c2fba121f7d1f760b6ddc8a"} Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.485609 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-zshwt" podStartSLOduration=5.131948041 
podStartE2EDuration="50.485591218s" podCreationTimestamp="2025-12-05 11:07:58 +0000 UTC" firstStartedPulling="2025-12-05 11:08:00.749054101 +0000 UTC m=+1207.697171795" lastFinishedPulling="2025-12-05 11:08:46.102697258 +0000 UTC m=+1253.050814972" observedRunningTime="2025-12-05 11:08:48.474167694 +0000 UTC m=+1255.422285398" watchObservedRunningTime="2025-12-05 11:08:48.485591218 +0000 UTC m=+1255.433708922" Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.486527 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78b5c6757c-hdtxh" event={"ID":"7a5e0260-dfe5-4f24-82bc-e172af4db809","Type":"ContainerStarted","Data":"29ffbdf41d66001d9e772922a6e3f04b023bd21d4b907f8449a94580e14b3b15"} Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.490863 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-777967dc98-c927p" event={"ID":"e2ed7afe-3f44-4214-b156-0404222f92a8","Type":"ContainerStarted","Data":"3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c"} Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.490989 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-777967dc98-c927p" event={"ID":"e2ed7afe-3f44-4214-b156-0404222f92a8","Type":"ContainerStarted","Data":"e0e050d3af172c2b5bace84fbd1b31f6aad64fdb52b175a0a27f23e9dca27ca7"} Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.513092 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-75c6d4746d-f9vpc" podStartSLOduration=4.51307271 podStartE2EDuration="4.51307271s" podCreationTimestamp="2025-12-05 11:08:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:48.509182876 +0000 UTC m=+1255.457300590" watchObservedRunningTime="2025-12-05 11:08:48.51307271 +0000 UTC m=+1255.461190424" Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.520882 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" event={"ID":"9cfdc764-b85e-48e8-8a0e-0945c00f278f","Type":"ContainerStarted","Data":"6183885188a514aff9875183bf2ebc8a608c26a487dd9e50d38887f80658faf0"} Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.522003 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.822529 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.822606 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.932508 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 11:08:48 crc kubenswrapper[5014]: I1205 11:08:48.949558 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.118669 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.228516 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcrqn\" (UniqueName: \"kubernetes.io/projected/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-kube-api-access-xcrqn\") pod \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.228645 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-combined-ca-bundle\") pod \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.228707 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-config\") pod \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\" (UID: \"eb060c27-a3ff-4233-9c8f-a5614f4ef60b\") " Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.240336 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-kube-api-access-xcrqn" (OuterVolumeSpecName: "kube-api-access-xcrqn") pod "eb060c27-a3ff-4233-9c8f-a5614f4ef60b" (UID: "eb060c27-a3ff-4233-9c8f-a5614f4ef60b"). InnerVolumeSpecName "kube-api-access-xcrqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.287554 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb060c27-a3ff-4233-9c8f-a5614f4ef60b" (UID: "eb060c27-a3ff-4233-9c8f-a5614f4ef60b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.333398 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-config" (OuterVolumeSpecName: "config") pod "eb060c27-a3ff-4233-9c8f-a5614f4ef60b" (UID: "eb060c27-a3ff-4233-9c8f-a5614f4ef60b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.334647 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcrqn\" (UniqueName: \"kubernetes.io/projected/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-kube-api-access-xcrqn\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.334683 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.334694 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/eb060c27-a3ff-4233-9c8f-a5614f4ef60b-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.549851 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" event={"ID":"0269d6fe-d6ee-4840-8ae2-cde3db4a989b","Type":"ContainerStarted","Data":"cf77c114e27f181b7dfb00657c7669c6d5c1e2fbac29c92fa79e397f13de5d32"} Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.551137 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.563706 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-777967dc98-c927p" event={"ID":"e2ed7afe-3f44-4214-b156-0404222f92a8","Type":"ContainerStarted","Data":"bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e"} Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.564792 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.564828 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.568427 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-fnq4z" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.568931 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-fnq4z" event={"ID":"eb060c27-a3ff-4233-9c8f-a5614f4ef60b","Type":"ContainerDied","Data":"271271af052d87ec67fb7ae992520ff3acf9536b52fb18e0be05b1561b89df33"} Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.568957 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="271271af052d87ec67fb7ae992520ff3acf9536b52fb18e0be05b1561b89df33" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.570801 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.570827 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.591809 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" podStartSLOduration=3.591781761 podStartE2EDuration="3.591781761s" podCreationTimestamp="2025-12-05 11:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:49.579938537 +0000 UTC m=+1256.528056251" watchObservedRunningTime="2025-12-05 11:08:49.591781761 +0000 UTC m=+1256.539899475" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.697685 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-777967dc98-c927p" podStartSLOduration=3.6976668 podStartE2EDuration="3.6976668s" podCreationTimestamp="2025-12-05 11:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:49.687336722 +0000 UTC m=+1256.635454456" watchObservedRunningTime="2025-12-05 11:08:49.6976668 +0000 UTC m=+1256.645784504" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.840319 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-krrbp"] Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.866932 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-rs5m9"] Dec 05 11:08:49 crc kubenswrapper[5014]: E1205 11:08:49.867364 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb060c27-a3ff-4233-9c8f-a5614f4ef60b" containerName="neutron-db-sync" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.867380 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb060c27-a3ff-4233-9c8f-a5614f4ef60b" containerName="neutron-db-sync" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.867573 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb060c27-a3ff-4233-9c8f-a5614f4ef60b" containerName="neutron-db-sync" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.868519 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.903313 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.903744 5014 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.906610 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-rs5m9"] Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.985618 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.985697 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.985763 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-svc\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.985818 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85jqq\" (UniqueName: \"kubernetes.io/projected/5693e3c7-0809-4e13-9e46-315780139182-kube-api-access-85jqq\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.985857 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:49 crc kubenswrapper[5014]: I1205 11:08:49.985887 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-config\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.076090 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-569f7f9774-fb89t"] Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.077751 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.083251 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.088585 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.088834 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-cjj6k" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.097767 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85jqq\" (UniqueName: \"kubernetes.io/projected/5693e3c7-0809-4e13-9e46-315780139182-kube-api-access-85jqq\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.097848 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.097899 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-config\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.098125 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.098173 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.098315 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-svc\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.099163 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-svc\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.104029 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: 
\"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.104534 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.104860 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-config\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.109417 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.112595 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.113349 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-569f7f9774-fb89t"] Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.129990 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85jqq\" (UniqueName: \"kubernetes.io/projected/5693e3c7-0809-4e13-9e46-315780139182-kube-api-access-85jqq\") pod \"dnsmasq-dns-688c87cc99-rs5m9\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.204153 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-config\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.204287 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-httpd-config\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.204362 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-combined-ca-bundle\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.204458 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmsjr\" (UniqueName: \"kubernetes.io/projected/ce101ba1-a588-4de6-bac6-964f608c509d-kube-api-access-nmsjr\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 
crc kubenswrapper[5014]: I1205 11:08:50.204626 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-ovndb-tls-certs\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.220022 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.309383 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-ovndb-tls-certs\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.309463 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-config\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.309483 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-httpd-config\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.309506 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-combined-ca-bundle\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.309556 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmsjr\" (UniqueName: \"kubernetes.io/projected/ce101ba1-a588-4de6-bac6-964f608c509d-kube-api-access-nmsjr\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.318543 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-httpd-config\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.319008 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-combined-ca-bundle\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.323931 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-ovndb-tls-certs\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " 
pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.336775 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-config\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.343378 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmsjr\" (UniqueName: \"kubernetes.io/projected/ce101ba1-a588-4de6-bac6-964f608c509d-kube-api-access-nmsjr\") pod \"neutron-569f7f9774-fb89t\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.485687 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-c6dbf5d74-pbtjs"] Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.500182 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.504578 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.512141 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-c6dbf5d74-pbtjs"] Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.513478 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.514052 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.626441 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5dd6878f44-n5k2l" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.630143 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-combined-ca-bundle\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.630187 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-public-tls-certs\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.630242 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-internal-tls-certs\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.630301 5014 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9klc\" (UniqueName: \"kubernetes.io/projected/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-kube-api-access-g9klc\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.630336 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-logs\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.630369 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-config-data-custom\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.630388 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-config-data\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.732398 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-combined-ca-bundle\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.732483 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-public-tls-certs\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.732608 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-internal-tls-certs\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.732685 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9klc\" (UniqueName: \"kubernetes.io/projected/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-kube-api-access-g9klc\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.732787 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-logs\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.732864 5014 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-config-data-custom\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.732890 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-config-data\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.733702 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-logs\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.741082 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-public-tls-certs\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.761126 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-combined-ca-bundle\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.761510 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-config-data\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.762328 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-internal-tls-certs\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.770932 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-config-data-custom\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.777158 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9klc\" (UniqueName: \"kubernetes.io/projected/65c45e15-99d6-4c93-ae6e-67bd07e7eba9-kube-api-access-g9klc\") pod \"barbican-api-c6dbf5d74-pbtjs\" (UID: \"65c45e15-99d6-4c93-ae6e-67bd07e7eba9\") " pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.786854 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-575d445b9b-l7wlc" 
podUID="b5b07bd8-c674-4647-a09b-eae67ddad491" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.847150 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.895289 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 11:08:50 crc kubenswrapper[5014]: I1205 11:08:50.937884 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-rs5m9"] Dec 05 11:08:51 crc kubenswrapper[5014]: I1205 11:08:51.636697 5014 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:08:51 crc kubenswrapper[5014]: I1205 11:08:51.636954 5014 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:08:51 crc kubenswrapper[5014]: I1205 11:08:51.638239 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" podUID="0269d6fe-d6ee-4840-8ae2-cde3db4a989b" containerName="dnsmasq-dns" containerID="cri-o://cf77c114e27f181b7dfb00657c7669c6d5c1e2fbac29c92fa79e397f13de5d32" gracePeriod=10 Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.657542 5014 generic.go:334] "Generic (PLEG): container finished" podID="0269d6fe-d6ee-4840-8ae2-cde3db4a989b" containerID="cf77c114e27f181b7dfb00657c7669c6d5c1e2fbac29c92fa79e397f13de5d32" exitCode=0 Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.658115 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" event={"ID":"0269d6fe-d6ee-4840-8ae2-cde3db4a989b","Type":"ContainerDied","Data":"cf77c114e27f181b7dfb00657c7669c6d5c1e2fbac29c92fa79e397f13de5d32"} Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.679233 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" event={"ID":"5693e3c7-0809-4e13-9e46-315780139182","Type":"ContainerStarted","Data":"3ac8910e83d0d0a0dfee81bfdd5257ea5ebda9c055b521066aa3e9b089100185"} Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.689497 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.741574 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-c6dbf5d74-pbtjs"] Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.780863 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-sb\") pod \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.780937 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-swift-storage-0\") pod \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.780968 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-nb\") pod \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.781178 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-svc\") pod \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.781233 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc5fh\" (UniqueName: \"kubernetes.io/projected/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-kube-api-access-gc5fh\") pod \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.781359 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-config\") pod \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\" (UID: \"0269d6fe-d6ee-4840-8ae2-cde3db4a989b\") " Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.788814 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-kube-api-access-gc5fh" (OuterVolumeSpecName: "kube-api-access-gc5fh") pod "0269d6fe-d6ee-4840-8ae2-cde3db4a989b" (UID: "0269d6fe-d6ee-4840-8ae2-cde3db4a989b"). InnerVolumeSpecName "kube-api-access-gc5fh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:08:52 crc kubenswrapper[5014]: W1205 11:08:52.831833 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65c45e15_99d6_4c93_ae6e_67bd07e7eba9.slice/crio-62047f3c31e0f181ca0f5076b509087d5f25eba61002a684c954bdab80173165 WatchSource:0}: Error finding container 62047f3c31e0f181ca0f5076b509087d5f25eba61002a684c954bdab80173165: Status 404 returned error can't find the container with id 62047f3c31e0f181ca0f5076b509087d5f25eba61002a684c954bdab80173165 Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.835733 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.835824 5014 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.837410 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.889011 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc5fh\" (UniqueName: \"kubernetes.io/projected/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-kube-api-access-gc5fh\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.919459 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-config" (OuterVolumeSpecName: "config") pod "0269d6fe-d6ee-4840-8ae2-cde3db4a989b" (UID: "0269d6fe-d6ee-4840-8ae2-cde3db4a989b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.922073 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0269d6fe-d6ee-4840-8ae2-cde3db4a989b" (UID: "0269d6fe-d6ee-4840-8ae2-cde3db4a989b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.922854 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0269d6fe-d6ee-4840-8ae2-cde3db4a989b" (UID: "0269d6fe-d6ee-4840-8ae2-cde3db4a989b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.936922 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0269d6fe-d6ee-4840-8ae2-cde3db4a989b" (UID: "0269d6fe-d6ee-4840-8ae2-cde3db4a989b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.962554 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0269d6fe-d6ee-4840-8ae2-cde3db4a989b" (UID: "0269d6fe-d6ee-4840-8ae2-cde3db4a989b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.993479 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.993509 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.993521 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.993532 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:52 crc kubenswrapper[5014]: I1205 11:08:52.993540 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0269d6fe-d6ee-4840-8ae2-cde3db4a989b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.064721 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-569f7f9774-fb89t"] Dec 05 11:08:53 crc kubenswrapper[5014]: W1205 11:08:53.080334 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce101ba1_a588_4de6_bac6_964f608c509d.slice/crio-d0507a1674953bee963913c4f85e9ab802803b1c93f635141652d4e71ebbe973 WatchSource:0}: Error finding container d0507a1674953bee963913c4f85e9ab802803b1c93f635141652d4e71ebbe973: Status 404 returned error can't find the container with id d0507a1674953bee963913c4f85e9ab802803b1c93f635141652d4e71ebbe973 Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.379424 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5d74b89875-gnlqf"] Dec 05 11:08:53 crc kubenswrapper[5014]: E1205 11:08:53.379799 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0269d6fe-d6ee-4840-8ae2-cde3db4a989b" containerName="dnsmasq-dns" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.379818 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="0269d6fe-d6ee-4840-8ae2-cde3db4a989b" containerName="dnsmasq-dns" Dec 05 11:08:53 crc kubenswrapper[5014]: E1205 11:08:53.379836 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0269d6fe-d6ee-4840-8ae2-cde3db4a989b" containerName="init" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.379846 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="0269d6fe-d6ee-4840-8ae2-cde3db4a989b" containerName="init" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.380051 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="0269d6fe-d6ee-4840-8ae2-cde3db4a989b" containerName="dnsmasq-dns" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.387651 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5d74b89875-gnlqf"] Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.388443 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.394863 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.395119 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.507746 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zts5t\" (UniqueName: \"kubernetes.io/projected/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-kube-api-access-zts5t\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.507860 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-combined-ca-bundle\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.507889 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-config\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.507920 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-httpd-config\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.507969 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-ovndb-tls-certs\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.508027 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-internal-tls-certs\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.508062 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-public-tls-certs\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.610310 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-combined-ca-bundle\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " 
pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.610352 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-config\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.610386 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-httpd-config\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.610413 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-ovndb-tls-certs\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.610455 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-internal-tls-certs\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.610478 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-public-tls-certs\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.610531 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zts5t\" (UniqueName: \"kubernetes.io/projected/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-kube-api-access-zts5t\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.615820 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-ovndb-tls-certs\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.617805 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-combined-ca-bundle\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.617926 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-internal-tls-certs\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.618340 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-config\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.618945 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-httpd-config\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.619572 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-public-tls-certs\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.628180 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zts5t\" (UniqueName: \"kubernetes.io/projected/91f750dc-c2ab-4b76-b659-4f5e11bf2e85-kube-api-access-zts5t\") pod \"neutron-5d74b89875-gnlqf\" (UID: \"91f750dc-c2ab-4b76-b659-4f5e11bf2e85\") " pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.698987 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" event={"ID":"9cfdc764-b85e-48e8-8a0e-0945c00f278f","Type":"ContainerStarted","Data":"4edfb9c48ca26a1fdf93acfac426d7fc75c728f4264b6030ac7c9040fcefb711"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.699315 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" event={"ID":"9cfdc764-b85e-48e8-8a0e-0945c00f278f","Type":"ContainerStarted","Data":"a0987a0d41ad78bc824b9264f85fab92f8d5f124af52f2359125f3512619a31d"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.704915 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f7f9774-fb89t" event={"ID":"ce101ba1-a588-4de6-bac6-964f608c509d","Type":"ContainerStarted","Data":"ae477d35ef3fbc3e04bdb3118a0fc729e46f7083ca03ac71211a35d4b6a353a1"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.704987 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f7f9774-fb89t" event={"ID":"ce101ba1-a588-4de6-bac6-964f608c509d","Type":"ContainerStarted","Data":"d0507a1674953bee963913c4f85e9ab802803b1c93f635141652d4e71ebbe973"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.712751 5014 generic.go:334] "Generic (PLEG): container finished" podID="5693e3c7-0809-4e13-9e46-315780139182" containerID="ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b" exitCode=0 Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.712896 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" event={"ID":"5693e3c7-0809-4e13-9e46-315780139182","Type":"ContainerDied","Data":"ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.716442 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" event={"ID":"0269d6fe-d6ee-4840-8ae2-cde3db4a989b","Type":"ContainerDied","Data":"cfe3dbe88a69a20832b284895cdc94e0deb847895c2fba121f7d1f760b6ddc8a"} Dec 05 
11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.716493 5014 scope.go:117] "RemoveContainer" containerID="cf77c114e27f181b7dfb00657c7669c6d5c1e2fbac29c92fa79e397f13de5d32" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.716608 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-krrbp" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.720776 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78b5c6757c-hdtxh" event={"ID":"7a5e0260-dfe5-4f24-82bc-e172af4db809","Type":"ContainerStarted","Data":"183108fbe5b7ccab7ac4b6831355d16049e59a529a47080bd9be405d72029d1c"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.720824 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78b5c6757c-hdtxh" event={"ID":"7a5e0260-dfe5-4f24-82bc-e172af4db809","Type":"ContainerStarted","Data":"853eb1a7e15604dd8c5e04ad59821d351231f845f05385b89e0aa0d66af5cbed"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.730294 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c6dbf5d74-pbtjs" event={"ID":"65c45e15-99d6-4c93-ae6e-67bd07e7eba9","Type":"ContainerStarted","Data":"a6bba24fdcbcfeedebec9cc96b534b92ea333d4487c226302097c64b12265029"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.730473 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c6dbf5d74-pbtjs" event={"ID":"65c45e15-99d6-4c93-ae6e-67bd07e7eba9","Type":"ContainerStarted","Data":"725925d461df5f5ca76fe188aea487a2ad1bb7f158ae36e8f648524cc35919e7"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.730503 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c6dbf5d74-pbtjs" event={"ID":"65c45e15-99d6-4c93-ae6e-67bd07e7eba9","Type":"ContainerStarted","Data":"62047f3c31e0f181ca0f5076b509087d5f25eba61002a684c954bdab80173165"} Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.730519 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.730543 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.741710 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-c76ffd784-m8mzt" podStartSLOduration=3.132011996 podStartE2EDuration="7.741683179s" podCreationTimestamp="2025-12-05 11:08:46 +0000 UTC" firstStartedPulling="2025-12-05 11:08:47.591452149 +0000 UTC m=+1254.539569853" lastFinishedPulling="2025-12-05 11:08:52.201123332 +0000 UTC m=+1259.149241036" observedRunningTime="2025-12-05 11:08:53.731931004 +0000 UTC m=+1260.680048698" watchObservedRunningTime="2025-12-05 11:08:53.741683179 +0000 UTC m=+1260.689801353" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.757504 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-c6dbf5d74-pbtjs" podStartSLOduration=3.757485029 podStartE2EDuration="3.757485029s" podCreationTimestamp="2025-12-05 11:08:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:53.756649809 +0000 UTC m=+1260.704767513" watchObservedRunningTime="2025-12-05 11:08:53.757485029 +0000 UTC m=+1260.705602733" Dec 05 11:08:53 crc kubenswrapper[5014]: 
I1205 11:08:53.762207 5014 scope.go:117] "RemoveContainer" containerID="f141cba59289beaf1b5a6ab1951d9dbd6333a8520fa64b3803f98740b818fc0c" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.835495 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:08:53 crc kubenswrapper[5014]: I1205 11:08:53.975930 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-78b5c6757c-hdtxh" podStartSLOduration=3.69562415 podStartE2EDuration="7.975910915s" podCreationTimestamp="2025-12-05 11:08:46 +0000 UTC" firstStartedPulling="2025-12-05 11:08:47.907555356 +0000 UTC m=+1254.855673060" lastFinishedPulling="2025-12-05 11:08:52.187842121 +0000 UTC m=+1259.135959825" observedRunningTime="2025-12-05 11:08:53.816669904 +0000 UTC m=+1260.764787618" watchObservedRunningTime="2025-12-05 11:08:53.975910915 +0000 UTC m=+1260.924028619" Dec 05 11:08:54 crc kubenswrapper[5014]: I1205 11:08:54.012331 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-krrbp"] Dec 05 11:08:54 crc kubenswrapper[5014]: I1205 11:08:54.021556 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-krrbp"] Dec 05 11:08:54 crc kubenswrapper[5014]: I1205 11:08:54.628185 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5d74b89875-gnlqf"] Dec 05 11:08:54 crc kubenswrapper[5014]: I1205 11:08:54.746821 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" event={"ID":"5693e3c7-0809-4e13-9e46-315780139182","Type":"ContainerStarted","Data":"eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a"} Dec 05 11:08:54 crc kubenswrapper[5014]: I1205 11:08:54.748250 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:08:54 crc kubenswrapper[5014]: I1205 11:08:54.753894 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f7f9774-fb89t" event={"ID":"ce101ba1-a588-4de6-bac6-964f608c509d","Type":"ContainerStarted","Data":"ca26be0cf7abe096de9c9490cfe3b0c3074f3a897a905edae548fcde4ad09208"} Dec 05 11:08:54 crc kubenswrapper[5014]: I1205 11:08:54.754647 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:08:54 crc kubenswrapper[5014]: I1205 11:08:54.781830 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" podStartSLOduration=5.781805472 podStartE2EDuration="5.781805472s" podCreationTimestamp="2025-12-05 11:08:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:54.774699851 +0000 UTC m=+1261.722817555" watchObservedRunningTime="2025-12-05 11:08:54.781805472 +0000 UTC m=+1261.729923186" Dec 05 11:08:55 crc kubenswrapper[5014]: I1205 11:08:55.336089 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0269d6fe-d6ee-4840-8ae2-cde3db4a989b" path="/var/lib/kubelet/pods/0269d6fe-d6ee-4840-8ae2-cde3db4a989b/volumes" Dec 05 11:08:56 crc kubenswrapper[5014]: I1205 11:08:56.777249 5014 generic.go:334] "Generic (PLEG): container finished" podID="36756ede-ab38-444f-8f4a-a07da8173882" containerID="374859f6acd833eec3d01eb214ae18360f03c0bb616482cedb65d51ec5502169" exitCode=0 Dec 05 11:08:56 crc kubenswrapper[5014]: I1205 11:08:56.777326 5014 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-zshwt" event={"ID":"36756ede-ab38-444f-8f4a-a07da8173882","Type":"ContainerDied","Data":"374859f6acd833eec3d01eb214ae18360f03c0bb616482cedb65d51ec5502169"} Dec 05 11:08:56 crc kubenswrapper[5014]: I1205 11:08:56.798550 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-569f7f9774-fb89t" podStartSLOduration=6.798531449 podStartE2EDuration="6.798531449s" podCreationTimestamp="2025-12-05 11:08:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:08:54.805064202 +0000 UTC m=+1261.753181916" watchObservedRunningTime="2025-12-05 11:08:56.798531449 +0000 UTC m=+1263.746649153" Dec 05 11:08:58 crc kubenswrapper[5014]: I1205 11:08:58.630623 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:08:58 crc kubenswrapper[5014]: I1205 11:08:58.641967 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:09:00 crc kubenswrapper[5014]: I1205 11:09:00.222025 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:09:00 crc kubenswrapper[5014]: I1205 11:09:00.291216 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-r7jbd"] Dec 05 11:09:00 crc kubenswrapper[5014]: I1205 11:09:00.291527 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" podUID="483692d0-4e6f-4f30-a62a-842d34670072" containerName="dnsmasq-dns" containerID="cri-o://30d152ca4d5b905f15a3c91004df94ab14d5d209d3c96c2d3e1789c65ee7102c" gracePeriod=10 Dec 05 11:09:00 crc kubenswrapper[5014]: I1205 11:09:00.827657 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d74b89875-gnlqf" event={"ID":"91f750dc-c2ab-4b76-b659-4f5e11bf2e85","Type":"ContainerStarted","Data":"35a881f2a8a718ff65973f0b892e57f1eb447345c1287ac4409feac3ac5b1a02"} Dec 05 11:09:00 crc kubenswrapper[5014]: I1205 11:09:00.831446 5014 generic.go:334] "Generic (PLEG): container finished" podID="483692d0-4e6f-4f30-a62a-842d34670072" containerID="30d152ca4d5b905f15a3c91004df94ab14d5d209d3c96c2d3e1789c65ee7102c" exitCode=0 Dec 05 11:09:00 crc kubenswrapper[5014]: I1205 11:09:00.831507 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" event={"ID":"483692d0-4e6f-4f30-a62a-842d34670072","Type":"ContainerDied","Data":"30d152ca4d5b905f15a3c91004df94ab14d5d209d3c96c2d3e1789c65ee7102c"} Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.482847 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-zshwt" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.540717 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-db-sync-config-data\") pod \"36756ede-ab38-444f-8f4a-a07da8173882\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.541069 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-combined-ca-bundle\") pod \"36756ede-ab38-444f-8f4a-a07da8173882\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.541110 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-scripts\") pod \"36756ede-ab38-444f-8f4a-a07da8173882\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.541152 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36756ede-ab38-444f-8f4a-a07da8173882-etc-machine-id\") pod \"36756ede-ab38-444f-8f4a-a07da8173882\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.541169 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-config-data\") pod \"36756ede-ab38-444f-8f4a-a07da8173882\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.541392 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrthj\" (UniqueName: \"kubernetes.io/projected/36756ede-ab38-444f-8f4a-a07da8173882-kube-api-access-xrthj\") pod \"36756ede-ab38-444f-8f4a-a07da8173882\" (UID: \"36756ede-ab38-444f-8f4a-a07da8173882\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.543422 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36756ede-ab38-444f-8f4a-a07da8173882-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "36756ede-ab38-444f-8f4a-a07da8173882" (UID: "36756ede-ab38-444f-8f4a-a07da8173882"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.557143 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-scripts" (OuterVolumeSpecName: "scripts") pod "36756ede-ab38-444f-8f4a-a07da8173882" (UID: "36756ede-ab38-444f-8f4a-a07da8173882"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.559456 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "36756ede-ab38-444f-8f4a-a07da8173882" (UID: "36756ede-ab38-444f-8f4a-a07da8173882"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.567482 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36756ede-ab38-444f-8f4a-a07da8173882-kube-api-access-xrthj" (OuterVolumeSpecName: "kube-api-access-xrthj") pod "36756ede-ab38-444f-8f4a-a07da8173882" (UID: "36756ede-ab38-444f-8f4a-a07da8173882"). InnerVolumeSpecName "kube-api-access-xrthj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.624922 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36756ede-ab38-444f-8f4a-a07da8173882" (UID: "36756ede-ab38-444f-8f4a-a07da8173882"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.625193 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-config-data" (OuterVolumeSpecName: "config-data") pod "36756ede-ab38-444f-8f4a-a07da8173882" (UID: "36756ede-ab38-444f-8f4a-a07da8173882"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.643563 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.643607 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.643616 5014 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36756ede-ab38-444f-8f4a-a07da8173882-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.643625 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.643634 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrthj\" (UniqueName: \"kubernetes.io/projected/36756ede-ab38-444f-8f4a-a07da8173882-kube-api-access-xrthj\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.643645 5014 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36756ede-ab38-444f-8f4a-a07da8173882-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.702046 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.846791 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jp54k\" (UniqueName: \"kubernetes.io/projected/483692d0-4e6f-4f30-a62a-842d34670072-kube-api-access-jp54k\") pod \"483692d0-4e6f-4f30-a62a-842d34670072\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.847151 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-swift-storage-0\") pod \"483692d0-4e6f-4f30-a62a-842d34670072\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.847234 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-config\") pod \"483692d0-4e6f-4f30-a62a-842d34670072\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.847313 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-nb\") pod \"483692d0-4e6f-4f30-a62a-842d34670072\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.847512 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-svc\") pod \"483692d0-4e6f-4f30-a62a-842d34670072\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.847591 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-sb\") pod \"483692d0-4e6f-4f30-a62a-842d34670072\" (UID: \"483692d0-4e6f-4f30-a62a-842d34670072\") " Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.854790 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/483692d0-4e6f-4f30-a62a-842d34670072-kube-api-access-jp54k" (OuterVolumeSpecName: "kube-api-access-jp54k") pod "483692d0-4e6f-4f30-a62a-842d34670072" (UID: "483692d0-4e6f-4f30-a62a-842d34670072"). InnerVolumeSpecName "kube-api-access-jp54k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: E1205 11:09:01.861403 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.882041 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" event={"ID":"483692d0-4e6f-4f30-a62a-842d34670072","Type":"ContainerDied","Data":"889cabd41495db5735f96197d6ee1d2b8612170be51e02edf1c41c58d2325072"} Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.882108 5014 scope.go:117] "RemoveContainer" containerID="30d152ca4d5b905f15a3c91004df94ab14d5d209d3c96c2d3e1789c65ee7102c" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.882242 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-r7jbd" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.892958 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-zshwt" event={"ID":"36756ede-ab38-444f-8f4a-a07da8173882","Type":"ContainerDied","Data":"a91de4f6f95b643bc5dd95095ddf181d01ca3ee5d968aef4960766f2fb3b2774"} Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.893004 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a91de4f6f95b643bc5dd95095ddf181d01ca3ee5d968aef4960766f2fb3b2774" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.893088 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-zshwt" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.906892 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d74b89875-gnlqf" event={"ID":"91f750dc-c2ab-4b76-b659-4f5e11bf2e85","Type":"ContainerStarted","Data":"a5de0be04a5281ec2c49d84282baa537708bf137302768d46f4225c3bd61e9d4"} Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.935501 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "483692d0-4e6f-4f30-a62a-842d34670072" (UID: "483692d0-4e6f-4f30-a62a-842d34670072"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.936412 5014 scope.go:117] "RemoveContainer" containerID="569774acfa88baa28e2bebcdcef58e9a75018bec3bed755a48d4e0ab13ac423a" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.940535 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-config" (OuterVolumeSpecName: "config") pod "483692d0-4e6f-4f30-a62a-842d34670072" (UID: "483692d0-4e6f-4f30-a62a-842d34670072"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.945388 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "483692d0-4e6f-4f30-a62a-842d34670072" (UID: "483692d0-4e6f-4f30-a62a-842d34670072"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.950635 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.950665 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jp54k\" (UniqueName: \"kubernetes.io/projected/483692d0-4e6f-4f30-a62a-842d34670072-kube-api-access-jp54k\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.950676 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.950684 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.964497 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "483692d0-4e6f-4f30-a62a-842d34670072" (UID: "483692d0-4e6f-4f30-a62a-842d34670072"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:01 crc kubenswrapper[5014]: I1205 11:09:01.966460 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "483692d0-4e6f-4f30-a62a-842d34670072" (UID: "483692d0-4e6f-4f30-a62a-842d34670072"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.052410 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.052453 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/483692d0-4e6f-4f30-a62a-842d34670072-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.243013 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-r7jbd"] Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.256616 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-r7jbd"] Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.701349 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.745987 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.837501 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:09:02 crc kubenswrapper[5014]: E1205 11:09:02.837989 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="483692d0-4e6f-4f30-a62a-842d34670072" containerName="dnsmasq-dns" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.838009 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="483692d0-4e6f-4f30-a62a-842d34670072" containerName="dnsmasq-dns" Dec 05 11:09:02 crc kubenswrapper[5014]: E1205 11:09:02.838023 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="483692d0-4e6f-4f30-a62a-842d34670072" containerName="init" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.838031 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="483692d0-4e6f-4f30-a62a-842d34670072" containerName="init" Dec 05 11:09:02 crc kubenswrapper[5014]: E1205 11:09:02.838050 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36756ede-ab38-444f-8f4a-a07da8173882" containerName="cinder-db-sync" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.838059 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="36756ede-ab38-444f-8f4a-a07da8173882" containerName="cinder-db-sync" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.838382 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="36756ede-ab38-444f-8f4a-a07da8173882" containerName="cinder-db-sync" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.838400 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="483692d0-4e6f-4f30-a62a-842d34670072" containerName="dnsmasq-dns" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.839645 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.843039 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.843239 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.844143 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.844485 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5rgsd" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.897342 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.941155 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.941205 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.941256 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.942042 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e69f8ff3539ebba47e81dc1689f38b27a404e4706e334acbe1fa267156045c14"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.942101 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://e69f8ff3539ebba47e81dc1689f38b27a404e4706e334acbe1fa267156045c14" gracePeriod=600 Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.967798 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-pp7kf"] Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.969976 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-c6dbf5d74-pbtjs" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.970084 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.972030 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.972085 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.972120 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-scripts\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.972245 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.972248 5014 generic.go:334] "Generic (PLEG): container finished" podID="803d83df-f847-425f-895a-4b1ea26e6868" containerID="68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9" exitCode=137 Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.972287 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcnbs\" (UniqueName: \"kubernetes.io/projected/31848c45-5068-47bb-899e-5e4240ff1886-kube-api-access-mcnbs\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.972330 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31848c45-5068-47bb-899e-5e4240ff1886-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.972371 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-757c4b6dbf-492tq" event={"ID":"803d83df-f847-425f-895a-4b1ea26e6868","Type":"ContainerDied","Data":"68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9"} Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.985451 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03429d0b-f6d7-4b47-8dd9-475bf3c88881","Type":"ContainerStarted","Data":"a3ff08cf44f46bcf7afef2fe8932efc9b21cc82e563a0c280fc59d74c21f8ed3"} Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.985500 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="ceilometer-notification-agent" 
containerID="cri-o://46aaa88e1720862bf3073fdaae83dfb767f99ab32457cf45b16047437b4b6062" gracePeriod=30 Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.985571 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.985595 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="proxy-httpd" containerID="cri-o://a3ff08cf44f46bcf7afef2fe8932efc9b21cc82e563a0c280fc59d74c21f8ed3" gracePeriod=30 Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.985632 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="sg-core" containerID="cri-o://bb5125794779a1e980da5772450649d5158cbe00df3d1bfea79e7f1f807bb4da" gracePeriod=30 Dec 05 11:09:02 crc kubenswrapper[5014]: I1205 11:09:02.998845 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-pp7kf"] Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.013589 5014 generic.go:334] "Generic (PLEG): container finished" podID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerID="aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52" exitCode=137 Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.013711 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-865b65b5c7-qjg6b" event={"ID":"d7407954-f41a-48ad-8cda-8c165c4fb5b8","Type":"ContainerDied","Data":"aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52"} Dec 05 11:09:03 crc kubenswrapper[5014]: E1205 11:09:03.027458 5014 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/0decb20b81fbc010cf7f6d38273810c6414b23f5b0ac7a734737079274ecb537/diff" to get inode usage: stat /var/lib/containers/storage/overlay/0decb20b81fbc010cf7f6d38273810c6414b23f5b0ac7a734737079274ecb537/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_dnsmasq-dns-57c957c4ff-r7jbd_483692d0-4e6f-4f30-a62a-842d34670072/dnsmasq-dns/0.log" to get inode usage: stat /var/log/pods/openstack_dnsmasq-dns-57c957c4ff-r7jbd_483692d0-4e6f-4f30-a62a-842d34670072/dnsmasq-dns/0.log: no such file or directory Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076369 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076405 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcnbs\" (UniqueName: \"kubernetes.io/projected/31848c45-5068-47bb-899e-5e4240ff1886-kube-api-access-mcnbs\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076433 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 
11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076455 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31848c45-5068-47bb-899e-5e4240ff1886-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076512 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076553 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076575 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-scripts\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076594 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwzbg\" (UniqueName: \"kubernetes.io/projected/cd11abe8-3b00-430e-bade-62fd4e9047b6-kube-api-access-xwzbg\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076612 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076648 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076673 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-config\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.076689 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 
11:09:03.080144 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31848c45-5068-47bb-899e-5e4240ff1886-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.081249 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d74b89875-gnlqf" event={"ID":"91f750dc-c2ab-4b76-b659-4f5e11bf2e85","Type":"ContainerStarted","Data":"461a9c57bf57b9cbe0471ef88539964725dfc04d83adf3380658fe5030018bf9"} Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.082011 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.084312 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-scripts\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.088843 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.100255 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.107483 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.135886 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-777967dc98-c927p"] Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.136175 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-777967dc98-c927p" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api-log" containerID="cri-o://3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c" gracePeriod=30 Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.136348 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-777967dc98-c927p" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api" containerID="cri-o://bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e" gracePeriod=30 Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.141996 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcnbs\" (UniqueName: \"kubernetes.io/projected/31848c45-5068-47bb-899e-5e4240ff1886-kube-api-access-mcnbs\") pod \"cinder-scheduler-0\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.167792 
5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-777967dc98-c927p" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": EOF" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.184319 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwzbg\" (UniqueName: \"kubernetes.io/projected/cd11abe8-3b00-430e-bade-62fd4e9047b6-kube-api-access-xwzbg\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.184390 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.184487 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.184587 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-config\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.184633 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.185023 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.194870 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.212532 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.214897 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.216172 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.217243 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-config\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.219795 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.295462 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.299059 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.301499 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.304654 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwzbg\" (UniqueName: \"kubernetes.io/projected/cd11abe8-3b00-430e-bade-62fd4e9047b6-kube-api-access-xwzbg\") pod \"dnsmasq-dns-6bb4fc677f-pp7kf\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.385098 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="483692d0-4e6f-4f30-a62a-842d34670072" path="/var/lib/kubelet/pods/483692d0-4e6f-4f30-a62a-842d34670072/volumes" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.393726 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.407809 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2dffaadf-cdd0-4e00-b797-ce14b5be714b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.407904 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.407944 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2dffaadf-cdd0-4e00-b797-ce14b5be714b-logs\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 
11:09:03.407980 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lqb7\" (UniqueName: \"kubernetes.io/projected/2dffaadf-cdd0-4e00-b797-ce14b5be714b-kube-api-access-9lqb7\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.408008 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data-custom\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.408043 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-scripts\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.408154 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.430118 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5d74b89875-gnlqf" podStartSLOduration=10.430098403 podStartE2EDuration="10.430098403s" podCreationTimestamp="2025-12-05 11:08:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:03.161454408 +0000 UTC m=+1270.109572122" watchObservedRunningTime="2025-12-05 11:09:03.430098403 +0000 UTC m=+1270.378216107" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.442216 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.512238 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lqb7\" (UniqueName: \"kubernetes.io/projected/2dffaadf-cdd0-4e00-b797-ce14b5be714b-kube-api-access-9lqb7\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.512324 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data-custom\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.512373 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-scripts\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.512491 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data\") pod \"cinder-api-0\" (UID: 
\"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.520128 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-scripts\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.522932 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2dffaadf-cdd0-4e00-b797-ce14b5be714b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.522995 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.523040 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2dffaadf-cdd0-4e00-b797-ce14b5be714b-logs\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.523211 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2dffaadf-cdd0-4e00-b797-ce14b5be714b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.531298 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.532822 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2dffaadf-cdd0-4e00-b797-ce14b5be714b-logs\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.537936 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.541847 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.541854 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lqb7\" (UniqueName: \"kubernetes.io/projected/2dffaadf-cdd0-4e00-b797-ce14b5be714b-kube-api-access-9lqb7\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.564742 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data-custom\") pod \"cinder-api-0\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " pod="openstack/cinder-api-0" Dec 05 11:09:03 crc kubenswrapper[5014]: I1205 11:09:03.722080 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.102652 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.155719 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.156681 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-scripts\") pod \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.156911 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7407954-f41a-48ad-8cda-8c165c4fb5b8-logs\") pod \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.156960 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-config-data\") pod \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.157019 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz2z2\" (UniqueName: \"kubernetes.io/projected/d7407954-f41a-48ad-8cda-8c165c4fb5b8-kube-api-access-zz2z2\") pod \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.157045 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7407954-f41a-48ad-8cda-8c165c4fb5b8-horizon-secret-key\") pod \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\" (UID: \"d7407954-f41a-48ad-8cda-8c165c4fb5b8\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.159226 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7407954-f41a-48ad-8cda-8c165c4fb5b8-logs" (OuterVolumeSpecName: "logs") pod "d7407954-f41a-48ad-8cda-8c165c4fb5b8" (UID: "d7407954-f41a-48ad-8cda-8c165c4fb5b8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.182242 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7407954-f41a-48ad-8cda-8c165c4fb5b8-kube-api-access-zz2z2" (OuterVolumeSpecName: "kube-api-access-zz2z2") pod "d7407954-f41a-48ad-8cda-8c165c4fb5b8" (UID: "d7407954-f41a-48ad-8cda-8c165c4fb5b8"). InnerVolumeSpecName "kube-api-access-zz2z2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.183294 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7407954-f41a-48ad-8cda-8c165c4fb5b8-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d7407954-f41a-48ad-8cda-8c165c4fb5b8" (UID: "d7407954-f41a-48ad-8cda-8c165c4fb5b8"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.208639 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-scripts" (OuterVolumeSpecName: "scripts") pod "d7407954-f41a-48ad-8cda-8c165c4fb5b8" (UID: "d7407954-f41a-48ad-8cda-8c165c4fb5b8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.215131 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-config-data" (OuterVolumeSpecName: "config-data") pod "d7407954-f41a-48ad-8cda-8c165c4fb5b8" (UID: "d7407954-f41a-48ad-8cda-8c165c4fb5b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.219233 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.221209 5014 generic.go:334] "Generic (PLEG): container finished" podID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerID="a3ff08cf44f46bcf7afef2fe8932efc9b21cc82e563a0c280fc59d74c21f8ed3" exitCode=0 Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.221358 5014 generic.go:334] "Generic (PLEG): container finished" podID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerID="bb5125794779a1e980da5772450649d5158cbe00df3d1bfea79e7f1f807bb4da" exitCode=2 Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.221456 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03429d0b-f6d7-4b47-8dd9-475bf3c88881","Type":"ContainerDied","Data":"a3ff08cf44f46bcf7afef2fe8932efc9b21cc82e563a0c280fc59d74c21f8ed3"} Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.221544 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03429d0b-f6d7-4b47-8dd9-475bf3c88881","Type":"ContainerDied","Data":"bb5125794779a1e980da5772450649d5158cbe00df3d1bfea79e7f1f807bb4da"} Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.235335 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="e69f8ff3539ebba47e81dc1689f38b27a404e4706e334acbe1fa267156045c14" exitCode=0 Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.235424 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" 
event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"e69f8ff3539ebba47e81dc1689f38b27a404e4706e334acbe1fa267156045c14"} Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.236016 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"0d6d61cc21a88a778a6896c0ce3a742c000804cdd014c81b67f82fc215c25138"} Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.236039 5014 scope.go:117] "RemoveContainer" containerID="4ddf8f910e52a088784fd2d469973cf4512542c6f65d5608f61ef0af3d2944f1" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.253206 5014 generic.go:334] "Generic (PLEG): container finished" podID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerID="49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52" exitCode=137 Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.253305 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-865b65b5c7-qjg6b" event={"ID":"d7407954-f41a-48ad-8cda-8c165c4fb5b8","Type":"ContainerDied","Data":"49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52"} Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.253361 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-865b65b5c7-qjg6b" event={"ID":"d7407954-f41a-48ad-8cda-8c165c4fb5b8","Type":"ContainerDied","Data":"76e2b80b41fdec2764ffd4cfcd5d0e0dac5d097db406fa614cd511b1d5e5dd61"} Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.253426 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-865b65b5c7-qjg6b" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.258590 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7nsp\" (UniqueName: \"kubernetes.io/projected/803d83df-f847-425f-895a-4b1ea26e6868-kube-api-access-m7nsp\") pod \"803d83df-f847-425f-895a-4b1ea26e6868\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.258669 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/803d83df-f847-425f-895a-4b1ea26e6868-horizon-secret-key\") pod \"803d83df-f847-425f-895a-4b1ea26e6868\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.258867 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/803d83df-f847-425f-895a-4b1ea26e6868-logs\") pod \"803d83df-f847-425f-895a-4b1ea26e6868\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.258918 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-config-data\") pod \"803d83df-f847-425f-895a-4b1ea26e6868\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.258940 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-scripts\") pod \"803d83df-f847-425f-895a-4b1ea26e6868\" (UID: \"803d83df-f847-425f-895a-4b1ea26e6868\") " Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.265606 5014 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/803d83df-f847-425f-895a-4b1ea26e6868-logs" (OuterVolumeSpecName: "logs") pod "803d83df-f847-425f-895a-4b1ea26e6868" (UID: "803d83df-f847-425f-895a-4b1ea26e6868"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.272541 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7407954-f41a-48ad-8cda-8c165c4fb5b8-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.273363 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.273895 5014 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7407954-f41a-48ad-8cda-8c165c4fb5b8-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.273946 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz2z2\" (UniqueName: \"kubernetes.io/projected/d7407954-f41a-48ad-8cda-8c165c4fb5b8-kube-api-access-zz2z2\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.273961 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7407954-f41a-48ad-8cda-8c165c4fb5b8-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.274262 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/803d83df-f847-425f-895a-4b1ea26e6868-kube-api-access-m7nsp" (OuterVolumeSpecName: "kube-api-access-m7nsp") pod "803d83df-f847-425f-895a-4b1ea26e6868" (UID: "803d83df-f847-425f-895a-4b1ea26e6868"). InnerVolumeSpecName "kube-api-access-m7nsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.276615 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/803d83df-f847-425f-895a-4b1ea26e6868-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "803d83df-f847-425f-895a-4b1ea26e6868" (UID: "803d83df-f847-425f-895a-4b1ea26e6868"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.278111 5014 generic.go:334] "Generic (PLEG): container finished" podID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerID="3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c" exitCode=143 Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.278225 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-777967dc98-c927p" event={"ID":"e2ed7afe-3f44-4214-b156-0404222f92a8","Type":"ContainerDied","Data":"3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c"} Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.288807 5014 generic.go:334] "Generic (PLEG): container finished" podID="803d83df-f847-425f-895a-4b1ea26e6868" containerID="6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee" exitCode=137 Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.290439 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-scripts" (OuterVolumeSpecName: "scripts") pod "803d83df-f847-425f-895a-4b1ea26e6868" (UID: "803d83df-f847-425f-895a-4b1ea26e6868"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.290501 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-757c4b6dbf-492tq" event={"ID":"803d83df-f847-425f-895a-4b1ea26e6868","Type":"ContainerDied","Data":"6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee"} Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.290527 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-757c4b6dbf-492tq" event={"ID":"803d83df-f847-425f-895a-4b1ea26e6868","Type":"ContainerDied","Data":"b027654e3da5ff07fc9103670a6e17536a905eb7cc099ebba5e529394c2da67b"} Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.291795 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-757c4b6dbf-492tq" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.350626 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-config-data" (OuterVolumeSpecName: "config-data") pod "803d83df-f847-425f-895a-4b1ea26e6868" (UID: "803d83df-f847-425f-895a-4b1ea26e6868"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.376130 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7nsp\" (UniqueName: \"kubernetes.io/projected/803d83df-f847-425f-895a-4b1ea26e6868-kube-api-access-m7nsp\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.376179 5014 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/803d83df-f847-425f-895a-4b1ea26e6868-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.376192 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/803d83df-f847-425f-895a-4b1ea26e6868-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.376203 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.376494 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/803d83df-f847-425f-895a-4b1ea26e6868-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.462941 5014 scope.go:117] "RemoveContainer" containerID="49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.467071 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-pp7kf"] Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.506431 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-865b65b5c7-qjg6b"] Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.514308 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-865b65b5c7-qjg6b"] Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.665450 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-757c4b6dbf-492tq"] Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.700264 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-757c4b6dbf-492tq"] Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.721561 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.746478 5014 scope.go:117] "RemoveContainer" containerID="aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.821644 5014 scope.go:117] "RemoveContainer" containerID="49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52" Dec 05 11:09:04 crc kubenswrapper[5014]: E1205 11:09:04.822380 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52\": container with ID starting with 49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52 not found: ID does not exist" containerID="49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.822428 5014 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52"} err="failed to get container status \"49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52\": rpc error: code = NotFound desc = could not find container \"49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52\": container with ID starting with 49321792e50f8a04aa49bf782ad8044feb05f44eb09a0e6878e00d1878db2e52 not found: ID does not exist" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.822455 5014 scope.go:117] "RemoveContainer" containerID="aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52" Dec 05 11:09:04 crc kubenswrapper[5014]: E1205 11:09:04.824312 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52\": container with ID starting with aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52 not found: ID does not exist" containerID="aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.824345 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52"} err="failed to get container status \"aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52\": rpc error: code = NotFound desc = could not find container \"aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52\": container with ID starting with aa831aa641625963252f1eb5f1d6fb6c563df0e7c501d871e6c2979aa2abcc52 not found: ID does not exist" Dec 05 11:09:04 crc kubenswrapper[5014]: I1205 11:09:04.824389 5014 scope.go:117] "RemoveContainer" containerID="6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee" Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.171114 5014 scope.go:117] "RemoveContainer" containerID="68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9" Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.333580 5014 generic.go:334] "Generic (PLEG): container finished" podID="cd11abe8-3b00-430e-bade-62fd4e9047b6" containerID="409fbb13373ac8520932f8e4dae0106a4fc39d6323bbf2bf7df692b326b1c5e5" exitCode=0 Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.345063 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="803d83df-f847-425f-895a-4b1ea26e6868" path="/var/lib/kubelet/pods/803d83df-f847-425f-895a-4b1ea26e6868/volumes" Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.346009 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" path="/var/lib/kubelet/pods/d7407954-f41a-48ad-8cda-8c165c4fb5b8/volumes" Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.346737 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" event={"ID":"cd11abe8-3b00-430e-bade-62fd4e9047b6","Type":"ContainerDied","Data":"409fbb13373ac8520932f8e4dae0106a4fc39d6323bbf2bf7df692b326b1c5e5"} Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.346990 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" event={"ID":"cd11abe8-3b00-430e-bade-62fd4e9047b6","Type":"ContainerStarted","Data":"343eca2baa62f924d83266d2b8f49eacefb8cb46bb779a7db3869b2274a70dce"} Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.352186 5014 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/cinder-scheduler-0" event={"ID":"31848c45-5068-47bb-899e-5e4240ff1886","Type":"ContainerStarted","Data":"72c8d5328bb01cc8642f3b0feea30c8abc2b40ffcc325100e84ed0c614280a4c"} Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.364442 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2dffaadf-cdd0-4e00-b797-ce14b5be714b","Type":"ContainerStarted","Data":"9153710b3bc242d34e0f993ce808dde2a2348b780579eae1b862e450aa6f2012"} Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.739186 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.919539 5014 scope.go:117] "RemoveContainer" containerID="6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee" Dec 05 11:09:05 crc kubenswrapper[5014]: E1205 11:09:05.921110 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee\": container with ID starting with 6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee not found: ID does not exist" containerID="6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee" Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.921228 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee"} err="failed to get container status \"6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee\": rpc error: code = NotFound desc = could not find container \"6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee\": container with ID starting with 6a725493cab7619780af3d171e4339f76ca1985eda7c60b8ff81a9c65089fbee not found: ID does not exist" Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.921836 5014 scope.go:117] "RemoveContainer" containerID="68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9" Dec 05 11:09:05 crc kubenswrapper[5014]: E1205 11:09:05.922216 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9\": container with ID starting with 68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9 not found: ID does not exist" containerID="68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9" Dec 05 11:09:05 crc kubenswrapper[5014]: I1205 11:09:05.922239 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9"} err="failed to get container status \"68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9\": rpc error: code = NotFound desc = could not find container \"68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9\": container with ID starting with 68368db994cd66363edaa6beab010a32208959a8254cc1c0e1ffbfcd6c35f6a9 not found: ID does not exist" Dec 05 11:09:06 crc kubenswrapper[5014]: I1205 11:09:06.212173 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:09:06 crc kubenswrapper[5014]: I1205 11:09:06.402176 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"2dffaadf-cdd0-4e00-b797-ce14b5be714b","Type":"ContainerStarted","Data":"bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75"} Dec 05 11:09:06 crc kubenswrapper[5014]: I1205 11:09:06.406778 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" event={"ID":"cd11abe8-3b00-430e-bade-62fd4e9047b6","Type":"ContainerStarted","Data":"04b05ece9dec83e03596ae482c704b765ff2110ee78173c090cf624823fd8438"} Dec 05 11:09:06 crc kubenswrapper[5014]: I1205 11:09:06.407002 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:06 crc kubenswrapper[5014]: I1205 11:09:06.429369 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" podStartSLOduration=4.429335447 podStartE2EDuration="4.429335447s" podCreationTimestamp="2025-12-05 11:09:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:06.425803601 +0000 UTC m=+1273.373921305" watchObservedRunningTime="2025-12-05 11:09:06.429335447 +0000 UTC m=+1273.377453151" Dec 05 11:09:06 crc kubenswrapper[5014]: I1205 11:09:06.551088 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-575d445b9b-l7wlc" Dec 05 11:09:06 crc kubenswrapper[5014]: I1205 11:09:06.608981 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5dd6878f44-n5k2l"] Dec 05 11:09:06 crc kubenswrapper[5014]: I1205 11:09:06.609201 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5dd6878f44-n5k2l" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon-log" containerID="cri-o://dd1f207c6133c61599fa712c228173fded90676011bb29274b175a31f0d78f1a" gracePeriod=30 Dec 05 11:09:06 crc kubenswrapper[5014]: I1205 11:09:06.614751 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5dd6878f44-n5k2l" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon" containerID="cri-o://e3ef6d861eeac7f4670cd6dade931517836e11444cd2462e01b5fe72fb3766cb" gracePeriod=30 Dec 05 11:09:07 crc kubenswrapper[5014]: I1205 11:09:07.416980 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"31848c45-5068-47bb-899e-5e4240ff1886","Type":"ContainerStarted","Data":"bf696e78b067a9ac11d94c766bec29cfc7ec9cb4f8b412b592b99c3f1e792e3d"} Dec 05 11:09:07 crc kubenswrapper[5014]: I1205 11:09:07.418694 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"31848c45-5068-47bb-899e-5e4240ff1886","Type":"ContainerStarted","Data":"e0fa949bf6d95664ef0ac7cb734d6420fd98257fea61d2c98d9c21adfc4f4109"} Dec 05 11:09:07 crc kubenswrapper[5014]: I1205 11:09:07.419987 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2dffaadf-cdd0-4e00-b797-ce14b5be714b","Type":"ContainerStarted","Data":"ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add"} Dec 05 11:09:07 crc kubenswrapper[5014]: I1205 11:09:07.420264 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerName="cinder-api-log" containerID="cri-o://bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75" gracePeriod=30 Dec 05 11:09:07 crc kubenswrapper[5014]: 
I1205 11:09:07.420345 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerName="cinder-api" containerID="cri-o://ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add" gracePeriod=30 Dec 05 11:09:07 crc kubenswrapper[5014]: I1205 11:09:07.420483 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 11:09:07 crc kubenswrapper[5014]: I1205 11:09:07.445581 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.713569229 podStartE2EDuration="5.445560334s" podCreationTimestamp="2025-12-05 11:09:02 +0000 UTC" firstStartedPulling="2025-12-05 11:09:04.233194871 +0000 UTC m=+1271.181312575" lastFinishedPulling="2025-12-05 11:09:05.965185976 +0000 UTC m=+1272.913303680" observedRunningTime="2025-12-05 11:09:07.438805771 +0000 UTC m=+1274.386923475" watchObservedRunningTime="2025-12-05 11:09:07.445560334 +0000 UTC m=+1274.393678038" Dec 05 11:09:07 crc kubenswrapper[5014]: I1205 11:09:07.481801 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.481784316 podStartE2EDuration="4.481784316s" podCreationTimestamp="2025-12-05 11:09:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:07.478532647 +0000 UTC m=+1274.426650371" watchObservedRunningTime="2025-12-05 11:09:07.481784316 +0000 UTC m=+1274.429902020" Dec 05 11:09:07 crc kubenswrapper[5014]: I1205 11:09:07.609233 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-777967dc98-c927p" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:55522->10.217.0.155:9311: read: connection reset by peer" Dec 05 11:09:07 crc kubenswrapper[5014]: I1205 11:09:07.609233 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-777967dc98-c927p" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:55524->10.217.0.155:9311: read: connection reset by peer" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.165704 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.173393 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212134 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-combined-ca-bundle\") pod \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212201 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data\") pod \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212248 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zsxr\" (UniqueName: \"kubernetes.io/projected/e2ed7afe-3f44-4214-b156-0404222f92a8-kube-api-access-4zsxr\") pod \"e2ed7afe-3f44-4214-b156-0404222f92a8\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212328 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ed7afe-3f44-4214-b156-0404222f92a8-logs\") pod \"e2ed7afe-3f44-4214-b156-0404222f92a8\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212386 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-combined-ca-bundle\") pod \"e2ed7afe-3f44-4214-b156-0404222f92a8\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212452 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lqb7\" (UniqueName: \"kubernetes.io/projected/2dffaadf-cdd0-4e00-b797-ce14b5be714b-kube-api-access-9lqb7\") pod \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212474 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-scripts\") pod \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212523 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data-custom\") pod \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212549 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data-custom\") pod \"e2ed7afe-3f44-4214-b156-0404222f92a8\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212596 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2dffaadf-cdd0-4e00-b797-ce14b5be714b-logs\") pod \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\" (UID: 
\"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212648 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data\") pod \"e2ed7afe-3f44-4214-b156-0404222f92a8\" (UID: \"e2ed7afe-3f44-4214-b156-0404222f92a8\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.212683 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2dffaadf-cdd0-4e00-b797-ce14b5be714b-etc-machine-id\") pod \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\" (UID: \"2dffaadf-cdd0-4e00-b797-ce14b5be714b\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.213098 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2ed7afe-3f44-4214-b156-0404222f92a8-logs" (OuterVolumeSpecName: "logs") pod "e2ed7afe-3f44-4214-b156-0404222f92a8" (UID: "e2ed7afe-3f44-4214-b156-0404222f92a8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.213182 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2dffaadf-cdd0-4e00-b797-ce14b5be714b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2dffaadf-cdd0-4e00-b797-ce14b5be714b" (UID: "2dffaadf-cdd0-4e00-b797-ce14b5be714b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.213526 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2dffaadf-cdd0-4e00-b797-ce14b5be714b-logs" (OuterVolumeSpecName: "logs") pod "2dffaadf-cdd0-4e00-b797-ce14b5be714b" (UID: "2dffaadf-cdd0-4e00-b797-ce14b5be714b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.213593 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ed7afe-3f44-4214-b156-0404222f92a8-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.213611 5014 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2dffaadf-cdd0-4e00-b797-ce14b5be714b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.220524 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.230733 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-scripts" (OuterVolumeSpecName: "scripts") pod "2dffaadf-cdd0-4e00-b797-ce14b5be714b" (UID: "2dffaadf-cdd0-4e00-b797-ce14b5be714b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.230778 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2ed7afe-3f44-4214-b156-0404222f92a8-kube-api-access-4zsxr" (OuterVolumeSpecName: "kube-api-access-4zsxr") pod "e2ed7afe-3f44-4214-b156-0404222f92a8" (UID: "e2ed7afe-3f44-4214-b156-0404222f92a8"). InnerVolumeSpecName "kube-api-access-4zsxr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.230859 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dffaadf-cdd0-4e00-b797-ce14b5be714b-kube-api-access-9lqb7" (OuterVolumeSpecName: "kube-api-access-9lqb7") pod "2dffaadf-cdd0-4e00-b797-ce14b5be714b" (UID: "2dffaadf-cdd0-4e00-b797-ce14b5be714b"). InnerVolumeSpecName "kube-api-access-9lqb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.234330 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e2ed7afe-3f44-4214-b156-0404222f92a8" (UID: "e2ed7afe-3f44-4214-b156-0404222f92a8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.234383 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2dffaadf-cdd0-4e00-b797-ce14b5be714b" (UID: "2dffaadf-cdd0-4e00-b797-ce14b5be714b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.259814 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2dffaadf-cdd0-4e00-b797-ce14b5be714b" (UID: "2dffaadf-cdd0-4e00-b797-ce14b5be714b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.261480 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2ed7afe-3f44-4214-b156-0404222f92a8" (UID: "e2ed7afe-3f44-4214-b156-0404222f92a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.279424 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data" (OuterVolumeSpecName: "config-data") pod "2dffaadf-cdd0-4e00-b797-ce14b5be714b" (UID: "2dffaadf-cdd0-4e00-b797-ce14b5be714b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.279624 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data" (OuterVolumeSpecName: "config-data") pod "e2ed7afe-3f44-4214-b156-0404222f92a8" (UID: "e2ed7afe-3f44-4214-b156-0404222f92a8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.315549 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2dffaadf-cdd0-4e00-b797-ce14b5be714b-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.316118 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.316141 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.316154 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.316165 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zsxr\" (UniqueName: \"kubernetes.io/projected/e2ed7afe-3f44-4214-b156-0404222f92a8-kube-api-access-4zsxr\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.316177 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.316186 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lqb7\" (UniqueName: \"kubernetes.io/projected/2dffaadf-cdd0-4e00-b797-ce14b5be714b-kube-api-access-9lqb7\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.316196 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.316205 5014 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2dffaadf-cdd0-4e00-b797-ce14b5be714b-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.316215 5014 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ed7afe-3f44-4214-b156-0404222f92a8-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.433578 5014 generic.go:334] "Generic (PLEG): container finished" podID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerID="bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e" exitCode=0 Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.433648 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-777967dc98-c927p" event={"ID":"e2ed7afe-3f44-4214-b156-0404222f92a8","Type":"ContainerDied","Data":"bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e"} Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.433685 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-777967dc98-c927p" 
event={"ID":"e2ed7afe-3f44-4214-b156-0404222f92a8","Type":"ContainerDied","Data":"e0e050d3af172c2b5bace84fbd1b31f6aad64fdb52b175a0a27f23e9dca27ca7"} Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.433706 5014 scope.go:117] "RemoveContainer" containerID="bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.433862 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-777967dc98-c927p" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.446692 5014 generic.go:334] "Generic (PLEG): container finished" podID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerID="ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add" exitCode=0 Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.446719 5014 generic.go:334] "Generic (PLEG): container finished" podID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerID="bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75" exitCode=143 Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.446876 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.450438 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2dffaadf-cdd0-4e00-b797-ce14b5be714b","Type":"ContainerDied","Data":"ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add"} Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.450646 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2dffaadf-cdd0-4e00-b797-ce14b5be714b","Type":"ContainerDied","Data":"bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75"} Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.450775 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2dffaadf-cdd0-4e00-b797-ce14b5be714b","Type":"ContainerDied","Data":"9153710b3bc242d34e0f993ce808dde2a2348b780579eae1b862e450aa6f2012"} Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.457935 5014 generic.go:334] "Generic (PLEG): container finished" podID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerID="46aaa88e1720862bf3073fdaae83dfb767f99ab32457cf45b16047437b4b6062" exitCode=0 Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.458019 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03429d0b-f6d7-4b47-8dd9-475bf3c88881","Type":"ContainerDied","Data":"46aaa88e1720862bf3073fdaae83dfb767f99ab32457cf45b16047437b4b6062"} Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.478658 5014 scope.go:117] "RemoveContainer" containerID="3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.484501 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-777967dc98-c927p"] Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.494435 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-777967dc98-c927p"] Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.510461 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.521060 5014 scope.go:117] "RemoveContainer" containerID="bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.522100 5014 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e\": container with ID starting with bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e not found: ID does not exist" containerID="bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.522139 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e"} err="failed to get container status \"bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e\": rpc error: code = NotFound desc = could not find container \"bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e\": container with ID starting with bb075a853e1771abfecc082ec4a7186a2a646a83052dfc1c8a48e8407262913e not found: ID does not exist" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.522165 5014 scope.go:117] "RemoveContainer" containerID="3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.522463 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.522801 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c\": container with ID starting with 3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c not found: ID does not exist" containerID="3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.523475 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c"} err="failed to get container status \"3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c\": rpc error: code = NotFound desc = could not find container \"3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c\": container with ID starting with 3e71275e1036efa2fc19793c40b42cb04c48c8c2ec4e738c4c18e82a5cae826c not found: ID does not exist" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.523518 5014 scope.go:117] "RemoveContainer" containerID="ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.562867 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.563350 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api-log" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.563432 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api-log" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.563465 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="803d83df-f847-425f-895a-4b1ea26e6868" containerName="horizon-log" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.563509 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="803d83df-f847-425f-895a-4b1ea26e6868" containerName="horizon-log" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.563519 5014 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerName="cinder-api" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.563526 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerName="cinder-api" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.563536 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerName="cinder-api-log" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.563542 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerName="cinder-api-log" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.563604 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.563615 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.563638 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerName="horizon-log" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.563644 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerName="horizon-log" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.563655 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerName="horizon" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.563660 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerName="horizon" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.563674 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="803d83df-f847-425f-895a-4b1ea26e6868" containerName="horizon" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.563679 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="803d83df-f847-425f-895a-4b1ea26e6868" containerName="horizon" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.564056 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerName="horizon-log" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.564079 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.564121 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="803d83df-f847-425f-895a-4b1ea26e6868" containerName="horizon" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.564131 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerName="cinder-api" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.564143 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7407954-f41a-48ad-8cda-8c165c4fb5b8" containerName="horizon" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.564161 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" containerName="barbican-api-log" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.564169 5014 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="803d83df-f847-425f-895a-4b1ea26e6868" containerName="horizon-log" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.564179 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" containerName="cinder-api-log" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.566080 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.568345 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.568404 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.568344 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.591510 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.623388 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-scripts\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.623529 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-config-data-custom\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.623641 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5e322d23-65da-40e8-b814-815c148aa523-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.623694 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dhnm\" (UniqueName: \"kubernetes.io/projected/5e322d23-65da-40e8-b814-815c148aa523-kube-api-access-5dhnm\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.623722 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-public-tls-certs\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.623778 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-config-data\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.623838 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.623903 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e322d23-65da-40e8-b814-815c148aa523-logs\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.623957 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.642557 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.654570 5014 scope.go:117] "RemoveContainer" containerID="bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.680189 5014 scope.go:117] "RemoveContainer" containerID="ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.680769 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add\": container with ID starting with ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add not found: ID does not exist" containerID="ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.680828 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add"} err="failed to get container status \"ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add\": rpc error: code = NotFound desc = could not find container \"ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add\": container with ID starting with ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add not found: ID does not exist" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.680860 5014 scope.go:117] "RemoveContainer" containerID="bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75" Dec 05 11:09:08 crc kubenswrapper[5014]: E1205 11:09:08.681374 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75\": container with ID starting with bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75 not found: ID does not exist" containerID="bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.681402 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75"} err="failed to get container status \"bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75\": rpc error: code = NotFound 
desc = could not find container \"bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75\": container with ID starting with bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75 not found: ID does not exist" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.681425 5014 scope.go:117] "RemoveContainer" containerID="ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.681665 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add"} err="failed to get container status \"ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add\": rpc error: code = NotFound desc = could not find container \"ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add\": container with ID starting with ccdcdff3636c04140eb259a752574c8425c78684ce18250d384be6ede5606add not found: ID does not exist" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.681681 5014 scope.go:117] "RemoveContainer" containerID="bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.681884 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75"} err="failed to get container status \"bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75\": rpc error: code = NotFound desc = could not find container \"bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75\": container with ID starting with bf67dd9cde3b5381c629a6637e0099ba13db5c55dbed241448631da5285c2f75 not found: ID does not exist" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.724747 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-combined-ca-bundle\") pod \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.724837 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-scripts\") pod \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.724877 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlqlk\" (UniqueName: \"kubernetes.io/projected/03429d0b-f6d7-4b47-8dd9-475bf3c88881-kube-api-access-rlqlk\") pod \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.725244 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-config-data\") pod \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.725361 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-log-httpd\") pod \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " Dec 05 11:09:08 crc 
kubenswrapper[5014]: I1205 11:09:08.725918 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "03429d0b-f6d7-4b47-8dd9-475bf3c88881" (UID: "03429d0b-f6d7-4b47-8dd9-475bf3c88881"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.726103 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-sg-core-conf-yaml\") pod \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.726194 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-run-httpd\") pod \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\" (UID: \"03429d0b-f6d7-4b47-8dd9-475bf3c88881\") " Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.727028 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "03429d0b-f6d7-4b47-8dd9-475bf3c88881" (UID: "03429d0b-f6d7-4b47-8dd9-475bf3c88881"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.727238 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.727467 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-scripts\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.727960 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-config-data-custom\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.728650 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5e322d23-65da-40e8-b814-815c148aa523-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.728765 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dhnm\" (UniqueName: \"kubernetes.io/projected/5e322d23-65da-40e8-b814-815c148aa523-kube-api-access-5dhnm\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.728863 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-public-tls-certs\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.728946 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5e322d23-65da-40e8-b814-815c148aa523-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.729330 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-config-data\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.730545 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.730716 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e322d23-65da-40e8-b814-815c148aa523-logs\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.730827 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-scripts" (OuterVolumeSpecName: "scripts") pod "03429d0b-f6d7-4b47-8dd9-475bf3c88881" (UID: "03429d0b-f6d7-4b47-8dd9-475bf3c88881"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.730876 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03429d0b-f6d7-4b47-8dd9-475bf3c88881-kube-api-access-rlqlk" (OuterVolumeSpecName: "kube-api-access-rlqlk") pod "03429d0b-f6d7-4b47-8dd9-475bf3c88881" (UID: "03429d0b-f6d7-4b47-8dd9-475bf3c88881"). InnerVolumeSpecName "kube-api-access-rlqlk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.731300 5014 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.731397 5014 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03429d0b-f6d7-4b47-8dd9-475bf3c88881-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.731738 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e322d23-65da-40e8-b814-815c148aa523-logs\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.732399 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-scripts\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.733284 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-public-tls-certs\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.733830 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.734329 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-config-data-custom\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.734881 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.744015 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e322d23-65da-40e8-b814-815c148aa523-config-data\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.745959 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dhnm\" (UniqueName: \"kubernetes.io/projected/5e322d23-65da-40e8-b814-815c148aa523-kube-api-access-5dhnm\") pod \"cinder-api-0\" (UID: \"5e322d23-65da-40e8-b814-815c148aa523\") " pod="openstack/cinder-api-0" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.776660 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "03429d0b-f6d7-4b47-8dd9-475bf3c88881" (UID: "03429d0b-f6d7-4b47-8dd9-475bf3c88881"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.812914 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03429d0b-f6d7-4b47-8dd9-475bf3c88881" (UID: "03429d0b-f6d7-4b47-8dd9-475bf3c88881"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.828705 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-config-data" (OuterVolumeSpecName: "config-data") pod "03429d0b-f6d7-4b47-8dd9-475bf3c88881" (UID: "03429d0b-f6d7-4b47-8dd9-475bf3c88881"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.833623 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.833662 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.833674 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlqlk\" (UniqueName: \"kubernetes.io/projected/03429d0b-f6d7-4b47-8dd9-475bf3c88881-kube-api-access-rlqlk\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.833687 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.833697 5014 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03429d0b-f6d7-4b47-8dd9-475bf3c88881-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:08 crc kubenswrapper[5014]: I1205 11:09:08.955926 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.331189 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dffaadf-cdd0-4e00-b797-ce14b5be714b" path="/var/lib/kubelet/pods/2dffaadf-cdd0-4e00-b797-ce14b5be714b/volumes" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.332896 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2ed7afe-3f44-4214-b156-0404222f92a8" path="/var/lib/kubelet/pods/e2ed7afe-3f44-4214-b156-0404222f92a8/volumes" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.386330 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.479098 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.479163 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03429d0b-f6d7-4b47-8dd9-475bf3c88881","Type":"ContainerDied","Data":"f0b16bb4b105b2b54b146869aa9feb70e3b593b1a9546e95052fd8adc3382cf1"} Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.479242 5014 scope.go:117] "RemoveContainer" containerID="a3ff08cf44f46bcf7afef2fe8932efc9b21cc82e563a0c280fc59d74c21f8ed3" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.488687 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"5e322d23-65da-40e8-b814-815c148aa523","Type":"ContainerStarted","Data":"f4a48dd127725c29e4d56b90d5441f031cf13fdd5f4857d6936216f603d23f7d"} Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.619110 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.622918 5014 scope.go:117] "RemoveContainer" containerID="bb5125794779a1e980da5772450649d5158cbe00df3d1bfea79e7f1f807bb4da" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.635703 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.664225 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:09 crc kubenswrapper[5014]: E1205 11:09:09.664710 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="proxy-httpd" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.664728 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="proxy-httpd" Dec 05 11:09:09 crc kubenswrapper[5014]: E1205 11:09:09.664759 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="ceilometer-notification-agent" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.664766 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="ceilometer-notification-agent" Dec 05 11:09:09 crc kubenswrapper[5014]: E1205 11:09:09.664787 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="sg-core" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.664792 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="sg-core" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.664979 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="sg-core" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.665004 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="ceilometer-notification-agent" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.665013 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" containerName="proxy-httpd" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.667080 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.671247 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.673964 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.677522 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.689138 5014 scope.go:117] "RemoveContainer" containerID="46aaa88e1720862bf3073fdaae83dfb767f99ab32457cf45b16047437b4b6062" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.774111 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-log-httpd\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.774221 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.774259 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4s9g\" (UniqueName: \"kubernetes.io/projected/f35347c8-e6d4-414b-9549-844bf669b473-kube-api-access-c4s9g\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.774326 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-scripts\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.774374 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-run-httpd\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.774486 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-config-data\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.774530 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.877319 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.877884 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4s9g\" (UniqueName: \"kubernetes.io/projected/f35347c8-e6d4-414b-9549-844bf669b473-kube-api-access-c4s9g\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.877932 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-scripts\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.877954 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-run-httpd\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.878084 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-config-data\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.878137 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.878198 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-log-httpd\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.879174 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-run-httpd\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.880245 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-log-httpd\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.884576 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.896065 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-config-data\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.896471 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.898433 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-scripts\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.898894 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4s9g\" (UniqueName: \"kubernetes.io/projected/f35347c8-e6d4-414b-9549-844bf669b473-kube-api-access-c4s9g\") pod \"ceilometer-0\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " pod="openstack/ceilometer-0" Dec 05 11:09:09 crc kubenswrapper[5014]: I1205 11:09:09.997962 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:10 crc kubenswrapper[5014]: I1205 11:09:10.476243 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:10 crc kubenswrapper[5014]: I1205 11:09:10.499563 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerStarted","Data":"68c7762b74540b107ff15efe669477c6ad75419b2dd540e53cdb2f971b891760"} Dec 05 11:09:10 crc kubenswrapper[5014]: I1205 11:09:10.505447 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"5e322d23-65da-40e8-b814-815c148aa523","Type":"ContainerStarted","Data":"073db6c40f45658e18eb831d0c56e3cde19c59847a98835329f04783518b0b2a"} Dec 05 11:09:10 crc kubenswrapper[5014]: I1205 11:09:10.511564 5014 generic.go:334] "Generic (PLEG): container finished" podID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerID="e3ef6d861eeac7f4670cd6dade931517836e11444cd2462e01b5fe72fb3766cb" exitCode=0 Dec 05 11:09:10 crc kubenswrapper[5014]: I1205 11:09:10.511612 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dd6878f44-n5k2l" event={"ID":"b3d0ec93-b994-4bc6-9a86-7085e79c7208","Type":"ContainerDied","Data":"e3ef6d861eeac7f4670cd6dade931517836e11444cd2462e01b5fe72fb3766cb"} Dec 05 11:09:10 crc kubenswrapper[5014]: I1205 11:09:10.618155 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5dd6878f44-n5k2l" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 05 11:09:11 crc kubenswrapper[5014]: I1205 11:09:11.330045 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03429d0b-f6d7-4b47-8dd9-475bf3c88881" path="/var/lib/kubelet/pods/03429d0b-f6d7-4b47-8dd9-475bf3c88881/volumes" Dec 05 11:09:11 crc kubenswrapper[5014]: I1205 11:09:11.521282 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerStarted","Data":"152b15d6354d541595a47fbb9dad773697d4b7701c1d297aa168783e3e08473e"} Dec 05 11:09:11 crc kubenswrapper[5014]: I1205 11:09:11.524343 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"5e322d23-65da-40e8-b814-815c148aa523","Type":"ContainerStarted","Data":"d6b1fe850f397ea630c278edfbab957cce2c4880fb3da9f6b8096a58ab7de34e"} Dec 05 11:09:11 crc kubenswrapper[5014]: I1205 11:09:11.524577 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 11:09:11 crc kubenswrapper[5014]: I1205 11:09:11.547958 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.547936747 podStartE2EDuration="3.547936747s" podCreationTimestamp="2025-12-05 11:09:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:11.539864223 +0000 UTC m=+1278.487981947" watchObservedRunningTime="2025-12-05 11:09:11.547936747 +0000 UTC m=+1278.496054451" Dec 05 11:09:12 crc kubenswrapper[5014]: I1205 11:09:12.546793 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerStarted","Data":"e8f2da87ff98d07a4d41ba24a48275874f932ae90d0025c1cf6b8a098fb4cd10"} Dec 05 11:09:13 crc kubenswrapper[5014]: I1205 11:09:13.424486 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 11:09:13 crc kubenswrapper[5014]: I1205 11:09:13.486939 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:09:13 crc kubenswrapper[5014]: I1205 11:09:13.544499 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:09:13 crc kubenswrapper[5014]: I1205 11:09:13.556260 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerStarted","Data":"2e05f0bb3c7d29aa4ec2b13e9066fa37e4afee7b282c9fe81d6d15551936871d"} Dec 05 11:09:13 crc kubenswrapper[5014]: I1205 11:09:13.557213 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="31848c45-5068-47bb-899e-5e4240ff1886" containerName="probe" containerID="cri-o://bf696e78b067a9ac11d94c766bec29cfc7ec9cb4f8b412b592b99c3f1e792e3d" gracePeriod=30 Dec 05 11:09:13 crc kubenswrapper[5014]: I1205 11:09:13.557187 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="31848c45-5068-47bb-899e-5e4240ff1886" containerName="cinder-scheduler" containerID="cri-o://e0fa949bf6d95664ef0ac7cb734d6420fd98257fea61d2c98d9c21adfc4f4109" gracePeriod=30 Dec 05 11:09:13 crc kubenswrapper[5014]: I1205 11:09:13.610947 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-rs5m9"] Dec 05 11:09:13 crc kubenswrapper[5014]: I1205 11:09:13.611428 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" podUID="5693e3c7-0809-4e13-9e46-315780139182" containerName="dnsmasq-dns" containerID="cri-o://eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a" gracePeriod=10 Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.123794 
5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.279660 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-nb\") pod \"5693e3c7-0809-4e13-9e46-315780139182\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.279773 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-swift-storage-0\") pod \"5693e3c7-0809-4e13-9e46-315780139182\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.279867 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-config\") pod \"5693e3c7-0809-4e13-9e46-315780139182\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.279943 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-sb\") pod \"5693e3c7-0809-4e13-9e46-315780139182\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.279965 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-svc\") pod \"5693e3c7-0809-4e13-9e46-315780139182\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.280005 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85jqq\" (UniqueName: \"kubernetes.io/projected/5693e3c7-0809-4e13-9e46-315780139182-kube-api-access-85jqq\") pod \"5693e3c7-0809-4e13-9e46-315780139182\" (UID: \"5693e3c7-0809-4e13-9e46-315780139182\") " Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.286841 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5693e3c7-0809-4e13-9e46-315780139182-kube-api-access-85jqq" (OuterVolumeSpecName: "kube-api-access-85jqq") pod "5693e3c7-0809-4e13-9e46-315780139182" (UID: "5693e3c7-0809-4e13-9e46-315780139182"). InnerVolumeSpecName "kube-api-access-85jqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.326525 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-config" (OuterVolumeSpecName: "config") pod "5693e3c7-0809-4e13-9e46-315780139182" (UID: "5693e3c7-0809-4e13-9e46-315780139182"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.326596 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5693e3c7-0809-4e13-9e46-315780139182" (UID: "5693e3c7-0809-4e13-9e46-315780139182"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.335390 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5693e3c7-0809-4e13-9e46-315780139182" (UID: "5693e3c7-0809-4e13-9e46-315780139182"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.340585 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5693e3c7-0809-4e13-9e46-315780139182" (UID: "5693e3c7-0809-4e13-9e46-315780139182"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.345828 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5693e3c7-0809-4e13-9e46-315780139182" (UID: "5693e3c7-0809-4e13-9e46-315780139182"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.382696 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.382745 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.382759 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.382773 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.382785 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85jqq\" (UniqueName: \"kubernetes.io/projected/5693e3c7-0809-4e13-9e46-315780139182-kube-api-access-85jqq\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.382799 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5693e3c7-0809-4e13-9e46-315780139182-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.567670 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerStarted","Data":"c65d2dc7697a3f785bec0999e70007d2933eaef65a3589f5ec69fea8dcc0ab04"} Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.569146 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.570576 5014 generic.go:334] "Generic 
(PLEG): container finished" podID="5693e3c7-0809-4e13-9e46-315780139182" containerID="eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a" exitCode=0 Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.570643 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" event={"ID":"5693e3c7-0809-4e13-9e46-315780139182","Type":"ContainerDied","Data":"eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a"} Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.570671 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" event={"ID":"5693e3c7-0809-4e13-9e46-315780139182","Type":"ContainerDied","Data":"3ac8910e83d0d0a0dfee81bfdd5257ea5ebda9c055b521066aa3e9b089100185"} Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.570693 5014 scope.go:117] "RemoveContainer" containerID="eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.570807 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-rs5m9" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.582804 5014 generic.go:334] "Generic (PLEG): container finished" podID="31848c45-5068-47bb-899e-5e4240ff1886" containerID="bf696e78b067a9ac11d94c766bec29cfc7ec9cb4f8b412b592b99c3f1e792e3d" exitCode=0 Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.582847 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"31848c45-5068-47bb-899e-5e4240ff1886","Type":"ContainerDied","Data":"bf696e78b067a9ac11d94c766bec29cfc7ec9cb4f8b412b592b99c3f1e792e3d"} Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.602218 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.413538753 podStartE2EDuration="5.602194415s" podCreationTimestamp="2025-12-05 11:09:09 +0000 UTC" firstStartedPulling="2025-12-05 11:09:10.479860371 +0000 UTC m=+1277.427978075" lastFinishedPulling="2025-12-05 11:09:13.668516033 +0000 UTC m=+1280.616633737" observedRunningTime="2025-12-05 11:09:14.597538383 +0000 UTC m=+1281.545656097" watchObservedRunningTime="2025-12-05 11:09:14.602194415 +0000 UTC m=+1281.550312119" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.616210 5014 scope.go:117] "RemoveContainer" containerID="ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.629311 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-rs5m9"] Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.638771 5014 scope.go:117] "RemoveContainer" containerID="eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a" Dec 05 11:09:14 crc kubenswrapper[5014]: E1205 11:09:14.639061 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a\": container with ID starting with eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a not found: ID does not exist" containerID="eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.639099 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a"} err="failed 
to get container status \"eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a\": rpc error: code = NotFound desc = could not find container \"eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a\": container with ID starting with eee1d4d9790c16e1465146b51fbe7c053d47b2eb3fc6ac070742faf871cbe78a not found: ID does not exist" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.639128 5014 scope.go:117] "RemoveContainer" containerID="ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b" Dec 05 11:09:14 crc kubenswrapper[5014]: E1205 11:09:14.639359 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b\": container with ID starting with ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b not found: ID does not exist" containerID="ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.639387 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b"} err="failed to get container status \"ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b\": rpc error: code = NotFound desc = could not find container \"ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b\": container with ID starting with ebe69c979dceb091e1af0a704c44d485037b66b5c779c2858a675211e8fc306b not found: ID does not exist" Dec 05 11:09:14 crc kubenswrapper[5014]: I1205 11:09:14.640556 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-rs5m9"] Dec 05 11:09:15 crc kubenswrapper[5014]: I1205 11:09:15.328462 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5693e3c7-0809-4e13-9e46-315780139182" path="/var/lib/kubelet/pods/5693e3c7-0809-4e13-9e46-315780139182/volumes" Dec 05 11:09:16 crc kubenswrapper[5014]: I1205 11:09:16.522660 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:09:16 crc kubenswrapper[5014]: I1205 11:09:16.709207 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-75c6d4746d-f9vpc" Dec 05 11:09:16 crc kubenswrapper[5014]: I1205 11:09:16.817829 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-c6c5974d5-l72zk" Dec 05 11:09:18 crc kubenswrapper[5014]: I1205 11:09:18.622178 5014 generic.go:334] "Generic (PLEG): container finished" podID="31848c45-5068-47bb-899e-5e4240ff1886" containerID="e0fa949bf6d95664ef0ac7cb734d6420fd98257fea61d2c98d9c21adfc4f4109" exitCode=0 Dec 05 11:09:18 crc kubenswrapper[5014]: I1205 11:09:18.622336 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"31848c45-5068-47bb-899e-5e4240ff1886","Type":"ContainerDied","Data":"e0fa949bf6d95664ef0ac7cb734d6420fd98257fea61d2c98d9c21adfc4f4109"} Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.006226 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.186810 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31848c45-5068-47bb-899e-5e4240ff1886-etc-machine-id\") pod \"31848c45-5068-47bb-899e-5e4240ff1886\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.186890 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcnbs\" (UniqueName: \"kubernetes.io/projected/31848c45-5068-47bb-899e-5e4240ff1886-kube-api-access-mcnbs\") pod \"31848c45-5068-47bb-899e-5e4240ff1886\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.186924 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data\") pod \"31848c45-5068-47bb-899e-5e4240ff1886\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.186989 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-scripts\") pod \"31848c45-5068-47bb-899e-5e4240ff1886\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.187084 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-combined-ca-bundle\") pod \"31848c45-5068-47bb-899e-5e4240ff1886\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.187260 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data-custom\") pod \"31848c45-5068-47bb-899e-5e4240ff1886\" (UID: \"31848c45-5068-47bb-899e-5e4240ff1886\") " Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.188086 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/31848c45-5068-47bb-899e-5e4240ff1886-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "31848c45-5068-47bb-899e-5e4240ff1886" (UID: "31848c45-5068-47bb-899e-5e4240ff1886"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.194951 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-scripts" (OuterVolumeSpecName: "scripts") pod "31848c45-5068-47bb-899e-5e4240ff1886" (UID: "31848c45-5068-47bb-899e-5e4240ff1886"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.195620 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "31848c45-5068-47bb-899e-5e4240ff1886" (UID: "31848c45-5068-47bb-899e-5e4240ff1886"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.196224 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31848c45-5068-47bb-899e-5e4240ff1886-kube-api-access-mcnbs" (OuterVolumeSpecName: "kube-api-access-mcnbs") pod "31848c45-5068-47bb-899e-5e4240ff1886" (UID: "31848c45-5068-47bb-899e-5e4240ff1886"). InnerVolumeSpecName "kube-api-access-mcnbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.267910 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31848c45-5068-47bb-899e-5e4240ff1886" (UID: "31848c45-5068-47bb-899e-5e4240ff1886"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.290006 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.290047 5014 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.290060 5014 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31848c45-5068-47bb-899e-5e4240ff1886-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.290073 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcnbs\" (UniqueName: \"kubernetes.io/projected/31848c45-5068-47bb-899e-5e4240ff1886-kube-api-access-mcnbs\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.290088 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.320300 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data" (OuterVolumeSpecName: "config-data") pod "31848c45-5068-47bb-899e-5e4240ff1886" (UID: "31848c45-5068-47bb-899e-5e4240ff1886"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.393264 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31848c45-5068-47bb-899e-5e4240ff1886-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.633802 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"31848c45-5068-47bb-899e-5e4240ff1886","Type":"ContainerDied","Data":"72c8d5328bb01cc8642f3b0feea30c8abc2b40ffcc325100e84ed0c614280a4c"} Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.633869 5014 scope.go:117] "RemoveContainer" containerID="bf696e78b067a9ac11d94c766bec29cfc7ec9cb4f8b412b592b99c3f1e792e3d" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.633902 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.659891 5014 scope.go:117] "RemoveContainer" containerID="e0fa949bf6d95664ef0ac7cb734d6420fd98257fea61d2c98d9c21adfc4f4109" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.669852 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.693904 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.712633 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:09:19 crc kubenswrapper[5014]: E1205 11:09:19.713148 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31848c45-5068-47bb-899e-5e4240ff1886" containerName="cinder-scheduler" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.713177 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="31848c45-5068-47bb-899e-5e4240ff1886" containerName="cinder-scheduler" Dec 05 11:09:19 crc kubenswrapper[5014]: E1205 11:09:19.713224 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5693e3c7-0809-4e13-9e46-315780139182" containerName="dnsmasq-dns" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.713235 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5693e3c7-0809-4e13-9e46-315780139182" containerName="dnsmasq-dns" Dec 05 11:09:19 crc kubenswrapper[5014]: E1205 11:09:19.713248 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31848c45-5068-47bb-899e-5e4240ff1886" containerName="probe" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.713257 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="31848c45-5068-47bb-899e-5e4240ff1886" containerName="probe" Dec 05 11:09:19 crc kubenswrapper[5014]: E1205 11:09:19.713289 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5693e3c7-0809-4e13-9e46-315780139182" containerName="init" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.713297 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5693e3c7-0809-4e13-9e46-315780139182" containerName="init" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.713541 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="5693e3c7-0809-4e13-9e46-315780139182" containerName="dnsmasq-dns" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.713572 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="31848c45-5068-47bb-899e-5e4240ff1886" containerName="cinder-scheduler" 
Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.713585 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="31848c45-5068-47bb-899e-5e4240ff1886" containerName="probe" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.714900 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.722315 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.726052 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.800922 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/934811df-aabf-44df-8b73-4612a55d73a2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.801191 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-scripts\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.801476 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44z6n\" (UniqueName: \"kubernetes.io/projected/934811df-aabf-44df-8b73-4612a55d73a2-kube-api-access-44z6n\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.801688 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-config-data\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.801885 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.802162 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.910402 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.910468 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/934811df-aabf-44df-8b73-4612a55d73a2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.910537 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-scripts\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.910586 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44z6n\" (UniqueName: \"kubernetes.io/projected/934811df-aabf-44df-8b73-4612a55d73a2-kube-api-access-44z6n\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.910628 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/934811df-aabf-44df-8b73-4612a55d73a2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.910637 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-config-data\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.910688 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.914885 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.915191 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-config-data\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.915734 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-scripts\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.927172 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/934811df-aabf-44df-8b73-4612a55d73a2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:19 crc kubenswrapper[5014]: I1205 11:09:19.934971 5014 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44z6n\" (UniqueName: \"kubernetes.io/projected/934811df-aabf-44df-8b73-4612a55d73a2-kube-api-access-44z6n\") pod \"cinder-scheduler-0\" (UID: \"934811df-aabf-44df-8b73-4612a55d73a2\") " pod="openstack/cinder-scheduler-0" Dec 05 11:09:20 crc kubenswrapper[5014]: I1205 11:09:20.040062 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 11:09:20 crc kubenswrapper[5014]: I1205 11:09:20.518436 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:09:20 crc kubenswrapper[5014]: I1205 11:09:20.582380 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 11:09:20 crc kubenswrapper[5014]: I1205 11:09:20.618618 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5dd6878f44-n5k2l" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 05 11:09:20 crc kubenswrapper[5014]: I1205 11:09:20.675907 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"934811df-aabf-44df-8b73-4612a55d73a2","Type":"ContainerStarted","Data":"e4dcb415546b0d1aee1cb0070003df93590bd933ade539af2a175326e748ad53"} Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.115638 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.133957 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-848c5c7c55-ctrjl"] Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.135548 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.140752 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.140989 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.141152 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.164622 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-848c5c7c55-ctrjl"] Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.237983 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36878e89-1c1c-4054-b9a5-159e056f95f4-run-httpd\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.238069 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-public-tls-certs\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.238094 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-internal-tls-certs\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.238115 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/36878e89-1c1c-4054-b9a5-159e056f95f4-etc-swift\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.238165 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-combined-ca-bundle\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.238220 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5glf\" (UniqueName: \"kubernetes.io/projected/36878e89-1c1c-4054-b9a5-159e056f95f4-kube-api-access-g5glf\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.238243 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-config-data\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " 
pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.238312 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36878e89-1c1c-4054-b9a5-159e056f95f4-log-httpd\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.340109 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36878e89-1c1c-4054-b9a5-159e056f95f4-run-httpd\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.340479 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-public-tls-certs\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.340499 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-internal-tls-certs\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.340519 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/36878e89-1c1c-4054-b9a5-159e056f95f4-etc-swift\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.340565 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-combined-ca-bundle\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.340625 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5glf\" (UniqueName: \"kubernetes.io/projected/36878e89-1c1c-4054-b9a5-159e056f95f4-kube-api-access-g5glf\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.340649 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-config-data\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.340695 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36878e89-1c1c-4054-b9a5-159e056f95f4-log-httpd\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 
11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.341157 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36878e89-1c1c-4054-b9a5-159e056f95f4-log-httpd\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.341472 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36878e89-1c1c-4054-b9a5-159e056f95f4-run-httpd\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.353740 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31848c45-5068-47bb-899e-5e4240ff1886" path="/var/lib/kubelet/pods/31848c45-5068-47bb-899e-5e4240ff1886/volumes" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.355063 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-internal-tls-certs\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.363233 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5glf\" (UniqueName: \"kubernetes.io/projected/36878e89-1c1c-4054-b9a5-159e056f95f4-kube-api-access-g5glf\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.368870 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-public-tls-certs\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.370104 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-config-data\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.370836 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/36878e89-1c1c-4054-b9a5-159e056f95f4-etc-swift\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.388797 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36878e89-1c1c-4054-b9a5-159e056f95f4-combined-ca-bundle\") pod \"swift-proxy-848c5c7c55-ctrjl\" (UID: \"36878e89-1c1c-4054-b9a5-159e056f95f4\") " pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.402897 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.404310 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.408063 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.409701 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-cxhkj" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.411593 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.444479 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.497061 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.544428 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx2kl\" (UniqueName: \"kubernetes.io/projected/8c29177a-803e-4037-b68c-f407a82a1537-kube-api-access-cx2kl\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.544548 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config-secret\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.544614 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.544654 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.617575 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 05 11:09:21 crc kubenswrapper[5014]: E1205 11:09:21.619308 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-cx2kl openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="8c29177a-803e-4037-b68c-f407a82a1537" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.633907 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.646915 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx2kl\" (UniqueName: \"kubernetes.io/projected/8c29177a-803e-4037-b68c-f407a82a1537-kube-api-access-cx2kl\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 
crc kubenswrapper[5014]: I1205 11:09:21.647083 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config-secret\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.647173 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.647212 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: E1205 11:09:21.649329 5014 projected.go:194] Error preparing data for projected volume kube-api-access-cx2kl for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 05 11:09:21 crc kubenswrapper[5014]: E1205 11:09:21.649392 5014 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8c29177a-803e-4037-b68c-f407a82a1537-kube-api-access-cx2kl podName:8c29177a-803e-4037-b68c-f407a82a1537 nodeName:}" failed. No retries permitted until 2025-12-05 11:09:22.149369061 +0000 UTC m=+1289.097486765 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cx2kl" (UniqueName: "kubernetes.io/projected/8c29177a-803e-4037-b68c-f407a82a1537-kube-api-access-cx2kl") pod "openstackclient" (UID: "8c29177a-803e-4037-b68c-f407a82a1537") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.649820 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.654548 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.655995 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config-secret\") pod \"openstackclient\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.714788 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.716474 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.742852 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.743236 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"934811df-aabf-44df-8b73-4612a55d73a2","Type":"ContainerStarted","Data":"067e18f1c04d71f98bc2cbd987172df2494f2c0612ed213d2cb726b61d0a4ec1"} Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.743974 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.746217 5014 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="8c29177a-803e-4037-b68c-f407a82a1537" podUID="f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.782156 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.850093 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-combined-ca-bundle\") pod \"8c29177a-803e-4037-b68c-f407a82a1537\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.851082 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config-secret\") pod \"8c29177a-803e-4037-b68c-f407a82a1537\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.851167 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config\") pod \"8c29177a-803e-4037-b68c-f407a82a1537\" (UID: \"8c29177a-803e-4037-b68c-f407a82a1537\") " Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.851987 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "8c29177a-803e-4037-b68c-f407a82a1537" (UID: "8c29177a-803e-4037-b68c-f407a82a1537"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.852617 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5w7t\" (UniqueName: \"kubernetes.io/projected/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-kube-api-access-g5w7t\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.852670 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-openstack-config\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.852778 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.852901 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-openstack-config-secret\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.852992 5014 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.853005 5014 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-cx2kl\" (UniqueName: \"kubernetes.io/projected/8c29177a-803e-4037-b68c-f407a82a1537-kube-api-access-cx2kl\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.857386 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "8c29177a-803e-4037-b68c-f407a82a1537" (UID: "8c29177a-803e-4037-b68c-f407a82a1537"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.857454 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c29177a-803e-4037-b68c-f407a82a1537" (UID: "8c29177a-803e-4037-b68c-f407a82a1537"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.954829 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5w7t\" (UniqueName: \"kubernetes.io/projected/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-kube-api-access-g5w7t\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.955130 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-openstack-config\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.955215 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.955358 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-openstack-config-secret\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.955469 5014 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.955488 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c29177a-803e-4037-b68c-f407a82a1537-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.960503 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-openstack-config\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.961052 5014 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-openstack-config-secret\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.963848 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:21 crc kubenswrapper[5014]: I1205 11:09:21.979101 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5w7t\" (UniqueName: \"kubernetes.io/projected/f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56-kube-api-access-g5w7t\") pod \"openstackclient\" (UID: \"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56\") " pod="openstack/openstackclient" Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.077202 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.108665 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-848c5c7c55-ctrjl"] Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.364530 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.752741 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-848c5c7c55-ctrjl" event={"ID":"36878e89-1c1c-4054-b9a5-159e056f95f4","Type":"ContainerStarted","Data":"2525073aaead31efda5d6fa26ce6ade9ff815f77db9768394ae5068f15064a25"} Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.753099 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-848c5c7c55-ctrjl" event={"ID":"36878e89-1c1c-4054-b9a5-159e056f95f4","Type":"ContainerStarted","Data":"451d6473ea8b2a23529769aa044c093acc0ce4397fca130526471b7f4ce3ebc2"} Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.754293 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56","Type":"ContainerStarted","Data":"c4b5f00cbd4bc523b0adedb67d386d5eede07de05d1dd619b59202ac8adfda63"} Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.758126 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.758171 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"934811df-aabf-44df-8b73-4612a55d73a2","Type":"ContainerStarted","Data":"9c50ed65edbae6d9b6e32af9d18a28bea1a573caba3addbdfb0a4a7497c657d1"} Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.763784 5014 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="8c29177a-803e-4037-b68c-f407a82a1537" podUID="f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56" Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.790388 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.7903654319999998 podStartE2EDuration="3.790365432s" podCreationTimestamp="2025-12-05 11:09:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:22.786152151 +0000 UTC m=+1289.734269875" watchObservedRunningTime="2025-12-05 11:09:22.790365432 +0000 UTC m=+1289.738483136" Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.843326 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.843847 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="ceilometer-central-agent" containerID="cri-o://152b15d6354d541595a47fbb9dad773697d4b7701c1d297aa168783e3e08473e" gracePeriod=30 Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.844178 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="proxy-httpd" containerID="cri-o://c65d2dc7697a3f785bec0999e70007d2933eaef65a3589f5ec69fea8dcc0ab04" gracePeriod=30 Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.844226 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="ceilometer-notification-agent" containerID="cri-o://e8f2da87ff98d07a4d41ba24a48275874f932ae90d0025c1cf6b8a098fb4cd10" gracePeriod=30 Dec 05 11:09:22 crc kubenswrapper[5014]: I1205 11:09:22.844173 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="sg-core" containerID="cri-o://2e05f0bb3c7d29aa4ec2b13e9066fa37e4afee7b282c9fe81d6d15551936871d" gracePeriod=30 Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.350683 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c29177a-803e-4037-b68c-f407a82a1537" path="/var/lib/kubelet/pods/8c29177a-803e-4037-b68c-f407a82a1537/volumes" Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.768754 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-848c5c7c55-ctrjl" event={"ID":"36878e89-1c1c-4054-b9a5-159e056f95f4","Type":"ContainerStarted","Data":"64911d21cd839d112e7f179ffbd17f6289048692a1569eae6520726c6712d7f3"} Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.769546 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 
11:09:23.773694 5014 generic.go:334] "Generic (PLEG): container finished" podID="f35347c8-e6d4-414b-9549-844bf669b473" containerID="c65d2dc7697a3f785bec0999e70007d2933eaef65a3589f5ec69fea8dcc0ab04" exitCode=0 Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.773722 5014 generic.go:334] "Generic (PLEG): container finished" podID="f35347c8-e6d4-414b-9549-844bf669b473" containerID="2e05f0bb3c7d29aa4ec2b13e9066fa37e4afee7b282c9fe81d6d15551936871d" exitCode=2 Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.773730 5014 generic.go:334] "Generic (PLEG): container finished" podID="f35347c8-e6d4-414b-9549-844bf669b473" containerID="152b15d6354d541595a47fbb9dad773697d4b7701c1d297aa168783e3e08473e" exitCode=0 Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.773956 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerDied","Data":"c65d2dc7697a3f785bec0999e70007d2933eaef65a3589f5ec69fea8dcc0ab04"} Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.774057 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerDied","Data":"2e05f0bb3c7d29aa4ec2b13e9066fa37e4afee7b282c9fe81d6d15551936871d"} Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.774134 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerDied","Data":"152b15d6354d541595a47fbb9dad773697d4b7701c1d297aa168783e3e08473e"} Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.793934 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-848c5c7c55-ctrjl" podStartSLOduration=2.793916795 podStartE2EDuration="2.793916795s" podCreationTimestamp="2025-12-05 11:09:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:23.790030941 +0000 UTC m=+1290.738148655" watchObservedRunningTime="2025-12-05 11:09:23.793916795 +0000 UTC m=+1290.742034499" Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.858632 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5d74b89875-gnlqf" Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.963835 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-569f7f9774-fb89t"] Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.964533 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-569f7f9774-fb89t" podUID="ce101ba1-a588-4de6-bac6-964f608c509d" containerName="neutron-httpd" containerID="cri-o://ca26be0cf7abe096de9c9490cfe3b0c3074f3a897a905edae548fcde4ad09208" gracePeriod=30 Dec 05 11:09:23 crc kubenswrapper[5014]: I1205 11:09:23.964134 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-569f7f9774-fb89t" podUID="ce101ba1-a588-4de6-bac6-964f608c509d" containerName="neutron-api" containerID="cri-o://ae477d35ef3fbc3e04bdb3118a0fc729e46f7083ca03ac71211a35d4b6a353a1" gracePeriod=30 Dec 05 11:09:24 crc kubenswrapper[5014]: I1205 11:09:24.793248 5014 generic.go:334] "Generic (PLEG): container finished" podID="ce101ba1-a588-4de6-bac6-964f608c509d" containerID="ca26be0cf7abe096de9c9490cfe3b0c3074f3a897a905edae548fcde4ad09208" exitCode=0 Dec 05 11:09:24 crc kubenswrapper[5014]: I1205 11:09:24.793306 5014 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f7f9774-fb89t" event={"ID":"ce101ba1-a588-4de6-bac6-964f608c509d","Type":"ContainerDied","Data":"ca26be0cf7abe096de9c9490cfe3b0c3074f3a897a905edae548fcde4ad09208"} Dec 05 11:09:24 crc kubenswrapper[5014]: I1205 11:09:24.794564 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:25 crc kubenswrapper[5014]: I1205 11:09:25.041224 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 11:09:25 crc kubenswrapper[5014]: I1205 11:09:25.836425 5014 generic.go:334] "Generic (PLEG): container finished" podID="f35347c8-e6d4-414b-9549-844bf669b473" containerID="e8f2da87ff98d07a4d41ba24a48275874f932ae90d0025c1cf6b8a098fb4cd10" exitCode=0 Dec 05 11:09:25 crc kubenswrapper[5014]: I1205 11:09:25.836621 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerDied","Data":"e8f2da87ff98d07a4d41ba24a48275874f932ae90d0025c1cf6b8a098fb4cd10"} Dec 05 11:09:25 crc kubenswrapper[5014]: I1205 11:09:25.928589 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.058244 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-config-data\") pod \"f35347c8-e6d4-414b-9549-844bf669b473\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.058643 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-scripts\") pod \"f35347c8-e6d4-414b-9549-844bf669b473\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.058732 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4s9g\" (UniqueName: \"kubernetes.io/projected/f35347c8-e6d4-414b-9549-844bf669b473-kube-api-access-c4s9g\") pod \"f35347c8-e6d4-414b-9549-844bf669b473\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.058850 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-combined-ca-bundle\") pod \"f35347c8-e6d4-414b-9549-844bf669b473\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.058945 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-sg-core-conf-yaml\") pod \"f35347c8-e6d4-414b-9549-844bf669b473\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.059119 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-run-httpd\") pod \"f35347c8-e6d4-414b-9549-844bf669b473\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.059352 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-log-httpd\") pod \"f35347c8-e6d4-414b-9549-844bf669b473\" (UID: \"f35347c8-e6d4-414b-9549-844bf669b473\") " Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.060603 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f35347c8-e6d4-414b-9549-844bf669b473" (UID: "f35347c8-e6d4-414b-9549-844bf669b473"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.062610 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f35347c8-e6d4-414b-9549-844bf669b473" (UID: "f35347c8-e6d4-414b-9549-844bf669b473"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.070475 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35347c8-e6d4-414b-9549-844bf669b473-kube-api-access-c4s9g" (OuterVolumeSpecName: "kube-api-access-c4s9g") pod "f35347c8-e6d4-414b-9549-844bf669b473" (UID: "f35347c8-e6d4-414b-9549-844bf669b473"). InnerVolumeSpecName "kube-api-access-c4s9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.070777 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-scripts" (OuterVolumeSpecName: "scripts") pod "f35347c8-e6d4-414b-9549-844bf669b473" (UID: "f35347c8-e6d4-414b-9549-844bf669b473"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.110209 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f35347c8-e6d4-414b-9549-844bf669b473" (UID: "f35347c8-e6d4-414b-9549-844bf669b473"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.161939 5014 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.162624 5014 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f35347c8-e6d4-414b-9549-844bf669b473-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.162712 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.162796 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4s9g\" (UniqueName: \"kubernetes.io/projected/f35347c8-e6d4-414b-9549-844bf669b473-kube-api-access-c4s9g\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.164019 5014 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.192588 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f35347c8-e6d4-414b-9549-844bf669b473" (UID: "f35347c8-e6d4-414b-9549-844bf669b473"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.198944 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-config-data" (OuterVolumeSpecName: "config-data") pod "f35347c8-e6d4-414b-9549-844bf669b473" (UID: "f35347c8-e6d4-414b-9549-844bf669b473"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.265513 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.265558 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35347c8-e6d4-414b-9549-844bf669b473-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.850514 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f35347c8-e6d4-414b-9549-844bf669b473","Type":"ContainerDied","Data":"68c7762b74540b107ff15efe669477c6ad75419b2dd540e53cdb2f971b891760"} Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.850892 5014 scope.go:117] "RemoveContainer" containerID="c65d2dc7697a3f785bec0999e70007d2933eaef65a3589f5ec69fea8dcc0ab04" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.850631 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.855433 5014 generic.go:334] "Generic (PLEG): container finished" podID="ce101ba1-a588-4de6-bac6-964f608c509d" containerID="ae477d35ef3fbc3e04bdb3118a0fc729e46f7083ca03ac71211a35d4b6a353a1" exitCode=0 Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.855494 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f7f9774-fb89t" event={"ID":"ce101ba1-a588-4de6-bac6-964f608c509d","Type":"ContainerDied","Data":"ae477d35ef3fbc3e04bdb3118a0fc729e46f7083ca03ac71211a35d4b6a353a1"} Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.896849 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.906091 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.913447 5014 scope.go:117] "RemoveContainer" containerID="2e05f0bb3c7d29aa4ec2b13e9066fa37e4afee7b282c9fe81d6d15551936871d" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.922562 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:26 crc kubenswrapper[5014]: E1205 11:09:26.922978 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="proxy-httpd" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.922996 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="proxy-httpd" Dec 05 11:09:26 crc kubenswrapper[5014]: E1205 11:09:26.923017 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="ceilometer-notification-agent" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.923023 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="ceilometer-notification-agent" Dec 05 11:09:26 crc kubenswrapper[5014]: E1205 11:09:26.923047 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="ceilometer-central-agent" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.923055 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="ceilometer-central-agent" Dec 05 11:09:26 crc kubenswrapper[5014]: E1205 11:09:26.923081 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="sg-core" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.923086 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="sg-core" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.923248 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="proxy-httpd" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.923282 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="ceilometer-notification-agent" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.923296 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="ceilometer-central-agent" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.923312 5014 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f35347c8-e6d4-414b-9549-844bf669b473" containerName="sg-core" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.924955 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.930669 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.930911 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.941369 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.966252 5014 scope.go:117] "RemoveContainer" containerID="e8f2da87ff98d07a4d41ba24a48275874f932ae90d0025c1cf6b8a098fb4cd10" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.971535 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.983911 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-config-data\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.984192 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-scripts\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.984337 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-log-httpd\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.984452 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.984586 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-run-httpd\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.984760 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d87b\" (UniqueName: \"kubernetes.io/projected/e0ab7533-dc12-4a86-9191-fb6e11db2740-kube-api-access-9d87b\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.984862 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:26 crc kubenswrapper[5014]: I1205 11:09:26.995163 5014 scope.go:117] "RemoveContainer" containerID="152b15d6354d541595a47fbb9dad773697d4b7701c1d297aa168783e3e08473e" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.087639 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-ovndb-tls-certs\") pod \"ce101ba1-a588-4de6-bac6-964f608c509d\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.087779 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-combined-ca-bundle\") pod \"ce101ba1-a588-4de6-bac6-964f608c509d\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.087818 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-config\") pod \"ce101ba1-a588-4de6-bac6-964f608c509d\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.087912 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmsjr\" (UniqueName: \"kubernetes.io/projected/ce101ba1-a588-4de6-bac6-964f608c509d-kube-api-access-nmsjr\") pod \"ce101ba1-a588-4de6-bac6-964f608c509d\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.088129 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-httpd-config\") pod \"ce101ba1-a588-4de6-bac6-964f608c509d\" (UID: \"ce101ba1-a588-4de6-bac6-964f608c509d\") " Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.088499 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-scripts\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.088550 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-log-httpd\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.088578 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.088604 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-run-httpd\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc 
kubenswrapper[5014]: I1205 11:09:27.088675 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d87b\" (UniqueName: \"kubernetes.io/projected/e0ab7533-dc12-4a86-9191-fb6e11db2740-kube-api-access-9d87b\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.088706 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.088763 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-config-data\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.089568 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-log-httpd\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.091839 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-run-httpd\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.098233 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-config-data\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.100012 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce101ba1-a588-4de6-bac6-964f608c509d-kube-api-access-nmsjr" (OuterVolumeSpecName: "kube-api-access-nmsjr") pod "ce101ba1-a588-4de6-bac6-964f608c509d" (UID: "ce101ba1-a588-4de6-bac6-964f608c509d"). InnerVolumeSpecName "kube-api-access-nmsjr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.101861 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.102552 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-scripts\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.108788 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "ce101ba1-a588-4de6-bac6-964f608c509d" (UID: "ce101ba1-a588-4de6-bac6-964f608c509d"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.108813 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.111175 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d87b\" (UniqueName: \"kubernetes.io/projected/e0ab7533-dc12-4a86-9191-fb6e11db2740-kube-api-access-9d87b\") pod \"ceilometer-0\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") " pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.170316 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-config" (OuterVolumeSpecName: "config") pod "ce101ba1-a588-4de6-bac6-964f608c509d" (UID: "ce101ba1-a588-4de6-bac6-964f608c509d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.172146 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce101ba1-a588-4de6-bac6-964f608c509d" (UID: "ce101ba1-a588-4de6-bac6-964f608c509d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.194065 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.194103 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.194116 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmsjr\" (UniqueName: \"kubernetes.io/projected/ce101ba1-a588-4de6-bac6-964f608c509d-kube-api-access-nmsjr\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.194131 5014 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.194535 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "ce101ba1-a588-4de6-bac6-964f608c509d" (UID: "ce101ba1-a588-4de6-bac6-964f608c509d"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.266568 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.295690 5014 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce101ba1-a588-4de6-bac6-964f608c509d-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.335361 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f35347c8-e6d4-414b-9549-844bf669b473" path="/var/lib/kubelet/pods/f35347c8-e6d4-414b-9549-844bf669b473/volumes" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.778177 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.866425 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-569f7f9774-fb89t" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.866368 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-569f7f9774-fb89t" event={"ID":"ce101ba1-a588-4de6-bac6-964f608c509d","Type":"ContainerDied","Data":"d0507a1674953bee963913c4f85e9ab802803b1c93f635141652d4e71ebbe973"} Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.866606 5014 scope.go:117] "RemoveContainer" containerID="ca26be0cf7abe096de9c9490cfe3b0c3074f3a897a905edae548fcde4ad09208" Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.871845 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerStarted","Data":"29c747d650d65892299313ea83c502cede8dd48f3259057ea979457bf3cd0c61"} Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.895521 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-569f7f9774-fb89t"] Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.903090 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-569f7f9774-fb89t"] Dec 05 11:09:27 crc kubenswrapper[5014]: I1205 11:09:27.905680 5014 scope.go:117] "RemoveContainer" containerID="ae477d35ef3fbc3e04bdb3118a0fc729e46f7083ca03ac71211a35d4b6a353a1" Dec 05 11:09:29 crc kubenswrapper[5014]: I1205 11:09:29.352932 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce101ba1-a588-4de6-bac6-964f608c509d" path="/var/lib/kubelet/pods/ce101ba1-a588-4de6-bac6-964f608c509d/volumes" Dec 05 11:09:29 crc kubenswrapper[5014]: I1205 11:09:29.481113 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:30 crc kubenswrapper[5014]: I1205 11:09:30.303385 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 11:09:30 crc kubenswrapper[5014]: I1205 11:09:30.618577 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5dd6878f44-n5k2l" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 05 11:09:30 crc kubenswrapper[5014]: I1205 11:09:30.618687 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:09:31 crc kubenswrapper[5014]: I1205 11:09:31.508316 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:31 crc kubenswrapper[5014]: I1205 11:09:31.508873 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-848c5c7c55-ctrjl" Dec 05 11:09:31 crc kubenswrapper[5014]: E1205 11:09:31.956824 5014 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/bbab4afd7cc85c4282ff4ff82764ae587c560592929ef2ba070875eb277c0691/diff" to get inode usage: stat /var/lib/containers/storage/overlay/bbab4afd7cc85c4282ff4ff82764ae587c560592929ef2ba070875eb277c0691/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_horizon-865b65b5c7-qjg6b_d7407954-f41a-48ad-8cda-8c165c4fb5b8/horizon-log/0.log" to get inode usage: stat /var/log/pods/openstack_horizon-865b65b5c7-qjg6b_d7407954-f41a-48ad-8cda-8c165c4fb5b8/horizon-log/0.log: no such file or 
directory Dec 05 11:09:31 crc kubenswrapper[5014]: E1205 11:09:31.964650 5014 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/b13830d1ffff89cc459be0a323c39dbebab139a962b4c92b2e40f790beecd244/diff" to get inode usage: stat /var/lib/containers/storage/overlay/b13830d1ffff89cc459be0a323c39dbebab139a962b4c92b2e40f790beecd244/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_horizon-757c4b6dbf-492tq_803d83df-f847-425f-895a-4b1ea26e6868/horizon-log/0.log" to get inode usage: stat /var/log/pods/openstack_horizon-757c4b6dbf-492tq_803d83df-f847-425f-895a-4b1ea26e6868/horizon-log/0.log: no such file or directory Dec 05 11:09:32 crc kubenswrapper[5014]: E1205 11:09:32.491908 5014 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/d8a1c32a0c2e79fdf0d5e976cb94be7985396d1d784feb975e19d1387c1e88ab/diff" to get inode usage: stat /var/lib/containers/storage/overlay/d8a1c32a0c2e79fdf0d5e976cb94be7985396d1d784feb975e19d1387c1e88ab/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_horizon-757c4b6dbf-492tq_803d83df-f847-425f-895a-4b1ea26e6868/horizon/0.log" to get inode usage: stat /var/log/pods/openstack_horizon-757c4b6dbf-492tq_803d83df-f847-425f-895a-4b1ea26e6868/horizon/0.log: no such file or directory Dec 05 11:09:32 crc kubenswrapper[5014]: E1205 11:09:32.506797 5014 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/722f85834b79fc54adea278c7aae49baad8f652b22c03b418712dacacd02da45/diff" to get inode usage: stat /var/lib/containers/storage/overlay/722f85834b79fc54adea278c7aae49baad8f652b22c03b418712dacacd02da45/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_horizon-865b65b5c7-qjg6b_d7407954-f41a-48ad-8cda-8c165c4fb5b8/horizon/0.log" to get inode usage: stat /var/log/pods/openstack_horizon-865b65b5c7-qjg6b_d7407954-f41a-48ad-8cda-8c165c4fb5b8/horizon/0.log: no such file or directory Dec 05 11:09:32 crc kubenswrapper[5014]: E1205 11:09:32.797914 5014 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/49f862b64e35fa99ce462dc77dd28929120a4b6be6c0583576485d75d9a092dd/diff" to get inode usage: stat /var/lib/containers/storage/overlay/49f862b64e35fa99ce462dc77dd28929120a4b6be6c0583576485d75d9a092dd/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_ceilometer-0_03429d0b-f6d7-4b47-8dd9-475bf3c88881/ceilometer-notification-agent/0.log" to get inode usage: stat /var/log/pods/openstack_ceilometer-0_03429d0b-f6d7-4b47-8dd9-475bf3c88881/ceilometer-notification-agent/0.log: no such file or directory Dec 05 11:09:35 crc kubenswrapper[5014]: I1205 11:09:35.952720 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56","Type":"ContainerStarted","Data":"cfc450a81dd74f38b9ab787f001de1baaded59345c46a5e7f8a2b601a02053da"} Dec 05 11:09:35 crc kubenswrapper[5014]: I1205 11:09:35.955845 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerStarted","Data":"734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae"} Dec 05 11:09:35 crc kubenswrapper[5014]: I1205 11:09:35.978844 5014 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.210067615 podStartE2EDuration="14.978817814s" podCreationTimestamp="2025-12-05 11:09:21 +0000 UTC" firstStartedPulling="2025-12-05 11:09:22.374095984 +0000 UTC m=+1289.322213688" lastFinishedPulling="2025-12-05 11:09:35.142846193 +0000 UTC m=+1302.090963887" observedRunningTime="2025-12-05 11:09:35.969166011 +0000 UTC m=+1302.917283735" watchObservedRunningTime="2025-12-05 11:09:35.978817814 +0000 UTC m=+1302.926935538" Dec 05 11:09:36 crc kubenswrapper[5014]: E1205 11:09:36.635379 5014 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0269d6fe_d6ee_4840_8ae2_cde3db4a989b.slice/crio-cfe3dbe88a69a20832b284895cdc94e0deb847895c2fba121f7d1f760b6ddc8a: Error finding container cfe3dbe88a69a20832b284895cdc94e0deb847895c2fba121f7d1f760b6ddc8a: Status 404 returned error can't find the container with id cfe3dbe88a69a20832b284895cdc94e0deb847895c2fba121f7d1f760b6ddc8a Dec 05 11:09:36 crc kubenswrapper[5014]: W1205 11:09:36.641168 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03429d0b_f6d7_4b47_8dd9_475bf3c88881.slice/crio-a3ff08cf44f46bcf7afef2fe8932efc9b21cc82e563a0c280fc59d74c21f8ed3.scope WatchSource:0}: Error finding container a3ff08cf44f46bcf7afef2fe8932efc9b21cc82e563a0c280fc59d74c21f8ed3: Status 404 returned error can't find the container with id a3ff08cf44f46bcf7afef2fe8932efc9b21cc82e563a0c280fc59d74c21f8ed3 Dec 05 11:09:36 crc kubenswrapper[5014]: W1205 11:09:36.641885 5014 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31848c45_5068_47bb_899e_5e4240ff1886.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31848c45_5068_47bb_899e_5e4240ff1886.slice: no such file or directory Dec 05 11:09:36 crc kubenswrapper[5014]: W1205 11:09:36.644459 5014 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2dffaadf_cdd0_4e00_b797_ce14b5be714b.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2dffaadf_cdd0_4e00_b797_ce14b5be714b.slice: no such file or directory Dec 05 11:09:36 crc kubenswrapper[5014]: W1205 11:09:36.650331 5014 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf35347c8_e6d4_414b_9549_844bf669b473.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf35347c8_e6d4_414b_9549_844bf669b473.slice: no such file or directory Dec 05 11:09:36 crc kubenswrapper[5014]: W1205 11:09:36.652502 5014 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c29177a_803e_4037_b68c_f407a82a1537.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c29177a_803e_4037_b68c_f407a82a1537.slice: no such file or directory Dec 05 11:09:37 crc kubenswrapper[5014]: I1205 11:09:37.988687 5014 generic.go:334] "Generic (PLEG): container finished" podID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" 
containerID="dd1f207c6133c61599fa712c228173fded90676011bb29274b175a31f0d78f1a" exitCode=137 Dec 05 11:09:37 crc kubenswrapper[5014]: I1205 11:09:37.988756 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dd6878f44-n5k2l" event={"ID":"b3d0ec93-b994-4bc6-9a86-7085e79c7208","Type":"ContainerDied","Data":"dd1f207c6133c61599fa712c228173fded90676011bb29274b175a31f0d78f1a"} Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.145875 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.221018 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-scripts\") pod \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.221154 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-config-data\") pod \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.221210 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhqlp\" (UniqueName: \"kubernetes.io/projected/b3d0ec93-b994-4bc6-9a86-7085e79c7208-kube-api-access-fhqlp\") pod \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.221261 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-secret-key\") pod \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.227801 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b3d0ec93-b994-4bc6-9a86-7085e79c7208" (UID: "b3d0ec93-b994-4bc6-9a86-7085e79c7208"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.229573 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3d0ec93-b994-4bc6-9a86-7085e79c7208-kube-api-access-fhqlp" (OuterVolumeSpecName: "kube-api-access-fhqlp") pod "b3d0ec93-b994-4bc6-9a86-7085e79c7208" (UID: "b3d0ec93-b994-4bc6-9a86-7085e79c7208"). InnerVolumeSpecName "kube-api-access-fhqlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.246981 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-config-data" (OuterVolumeSpecName: "config-data") pod "b3d0ec93-b994-4bc6-9a86-7085e79c7208" (UID: "b3d0ec93-b994-4bc6-9a86-7085e79c7208"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.256519 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-scripts" (OuterVolumeSpecName: "scripts") pod "b3d0ec93-b994-4bc6-9a86-7085e79c7208" (UID: "b3d0ec93-b994-4bc6-9a86-7085e79c7208"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.322852 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-tls-certs\") pod \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.323460 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3d0ec93-b994-4bc6-9a86-7085e79c7208-logs\") pod \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.323495 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-combined-ca-bundle\") pod \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\" (UID: \"b3d0ec93-b994-4bc6-9a86-7085e79c7208\") " Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.323912 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3d0ec93-b994-4bc6-9a86-7085e79c7208-logs" (OuterVolumeSpecName: "logs") pod "b3d0ec93-b994-4bc6-9a86-7085e79c7208" (UID: "b3d0ec93-b994-4bc6-9a86-7085e79c7208"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.324650 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.324673 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhqlp\" (UniqueName: \"kubernetes.io/projected/b3d0ec93-b994-4bc6-9a86-7085e79c7208-kube-api-access-fhqlp\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.324686 5014 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.324699 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3d0ec93-b994-4bc6-9a86-7085e79c7208-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.324710 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3d0ec93-b994-4bc6-9a86-7085e79c7208-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.358027 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3d0ec93-b994-4bc6-9a86-7085e79c7208" (UID: "b3d0ec93-b994-4bc6-9a86-7085e79c7208"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.390732 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "b3d0ec93-b994-4bc6-9a86-7085e79c7208" (UID: "b3d0ec93-b994-4bc6-9a86-7085e79c7208"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.426264 5014 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:38 crc kubenswrapper[5014]: I1205 11:09:38.426553 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0ec93-b994-4bc6-9a86-7085e79c7208-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:09:39 crc kubenswrapper[5014]: I1205 11:09:39.000515 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dd6878f44-n5k2l" event={"ID":"b3d0ec93-b994-4bc6-9a86-7085e79c7208","Type":"ContainerDied","Data":"eb140c30daf7078f9b6d33c7b5180eab8b8fd4acc86994759e77bfca772df80a"} Dec 05 11:09:39 crc kubenswrapper[5014]: I1205 11:09:39.000715 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5dd6878f44-n5k2l" Dec 05 11:09:39 crc kubenswrapper[5014]: I1205 11:09:39.001115 5014 scope.go:117] "RemoveContainer" containerID="e3ef6d861eeac7f4670cd6dade931517836e11444cd2462e01b5fe72fb3766cb" Dec 05 11:09:39 crc kubenswrapper[5014]: I1205 11:09:39.011569 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerStarted","Data":"4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a"} Dec 05 11:09:39 crc kubenswrapper[5014]: I1205 11:09:39.049348 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5dd6878f44-n5k2l"] Dec 05 11:09:39 crc kubenswrapper[5014]: I1205 11:09:39.063008 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5dd6878f44-n5k2l"] Dec 05 11:09:39 crc kubenswrapper[5014]: I1205 11:09:39.201840 5014 scope.go:117] "RemoveContainer" containerID="dd1f207c6133c61599fa712c228173fded90676011bb29274b175a31f0d78f1a" Dec 05 11:09:39 crc kubenswrapper[5014]: I1205 11:09:39.329491 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" path="/var/lib/kubelet/pods/b3d0ec93-b994-4bc6-9a86-7085e79c7208/volumes" Dec 05 11:09:40 crc kubenswrapper[5014]: I1205 11:09:40.052224 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerStarted","Data":"d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8"} Dec 05 11:09:42 crc kubenswrapper[5014]: I1205 11:09:42.075722 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerStarted","Data":"780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14"} Dec 05 11:09:42 crc kubenswrapper[5014]: I1205 11:09:42.076504 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:09:42 crc kubenswrapper[5014]: I1205 11:09:42.075904 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="proxy-httpd" containerID="cri-o://780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14" gracePeriod=30 Dec 05 11:09:42 crc kubenswrapper[5014]: I1205 11:09:42.075960 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="ceilometer-notification-agent" containerID="cri-o://4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a" gracePeriod=30 Dec 05 11:09:42 crc kubenswrapper[5014]: I1205 11:09:42.075858 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="ceilometer-central-agent" containerID="cri-o://734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae" gracePeriod=30 Dec 05 11:09:42 crc kubenswrapper[5014]: I1205 11:09:42.075972 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="sg-core" containerID="cri-o://d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8" gracePeriod=30 Dec 05 11:09:42 crc kubenswrapper[5014]: I1205 11:09:42.102941 5014 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.844986461 podStartE2EDuration="16.102924264s" podCreationTimestamp="2025-12-05 11:09:26 +0000 UTC" firstStartedPulling="2025-12-05 11:09:27.795612695 +0000 UTC m=+1294.743730399" lastFinishedPulling="2025-12-05 11:09:41.053550498 +0000 UTC m=+1308.001668202" observedRunningTime="2025-12-05 11:09:42.101324985 +0000 UTC m=+1309.049442709" watchObservedRunningTime="2025-12-05 11:09:42.102924264 +0000 UTC m=+1309.051041968" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.084720 5014 generic.go:334] "Generic (PLEG): container finished" podID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerID="780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14" exitCode=0 Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.084765 5014 generic.go:334] "Generic (PLEG): container finished" podID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerID="d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8" exitCode=2 Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.084774 5014 generic.go:334] "Generic (PLEG): container finished" podID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerID="4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a" exitCode=0 Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.084802 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerDied","Data":"780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14"} Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.084865 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerDied","Data":"d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8"} Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.084880 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerDied","Data":"4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a"} Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.247414 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-bq9r8"] Dec 05 11:09:43 crc kubenswrapper[5014]: E1205 11:09:43.247821 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.247841 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon" Dec 05 11:09:43 crc kubenswrapper[5014]: E1205 11:09:43.247865 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce101ba1-a588-4de6-bac6-964f608c509d" containerName="neutron-httpd" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.247872 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce101ba1-a588-4de6-bac6-964f608c509d" containerName="neutron-httpd" Dec 05 11:09:43 crc kubenswrapper[5014]: E1205 11:09:43.247890 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon-log" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.247897 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon-log" Dec 05 11:09:43 crc kubenswrapper[5014]: E1205 11:09:43.247912 5014 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce101ba1-a588-4de6-bac6-964f608c509d" containerName="neutron-api" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.247918 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce101ba1-a588-4de6-bac6-964f608c509d" containerName="neutron-api" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.248088 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce101ba1-a588-4de6-bac6-964f608c509d" containerName="neutron-httpd" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.248101 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.248113 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce101ba1-a588-4de6-bac6-964f608c509d" containerName="neutron-api" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.248123 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3d0ec93-b994-4bc6-9a86-7085e79c7208" containerName="horizon-log" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.248728 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bq9r8" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.281421 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-bq9r8"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.342719 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs6pl\" (UniqueName: \"kubernetes.io/projected/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-kube-api-access-vs6pl\") pod \"nova-api-db-create-bq9r8\" (UID: \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\") " pod="openstack/nova-api-db-create-bq9r8" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.342789 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-operator-scripts\") pod \"nova-api-db-create-bq9r8\" (UID: \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\") " pod="openstack/nova-api-db-create-bq9r8" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.350442 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-b24t5"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.352144 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-b24t5" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.359760 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-b24t5"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.447937 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzdvf\" (UniqueName: \"kubernetes.io/projected/35aeff68-6195-4a18-bcc6-d744b43632bf-kube-api-access-rzdvf\") pod \"nova-cell0-db-create-b24t5\" (UID: \"35aeff68-6195-4a18-bcc6-d744b43632bf\") " pod="openstack/nova-cell0-db-create-b24t5" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.448154 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35aeff68-6195-4a18-bcc6-d744b43632bf-operator-scripts\") pod \"nova-cell0-db-create-b24t5\" (UID: \"35aeff68-6195-4a18-bcc6-d744b43632bf\") " pod="openstack/nova-cell0-db-create-b24t5" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.448240 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs6pl\" (UniqueName: \"kubernetes.io/projected/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-kube-api-access-vs6pl\") pod \"nova-api-db-create-bq9r8\" (UID: \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\") " pod="openstack/nova-api-db-create-bq9r8" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.448300 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-operator-scripts\") pod \"nova-api-db-create-bq9r8\" (UID: \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\") " pod="openstack/nova-api-db-create-bq9r8" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.449116 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-operator-scripts\") pod \"nova-api-db-create-bq9r8\" (UID: \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\") " pod="openstack/nova-api-db-create-bq9r8" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.449380 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-8768-account-create-update-8svtq"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.454717 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-8768-account-create-update-8svtq" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.461258 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.484068 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8768-account-create-update-8svtq"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.492545 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs6pl\" (UniqueName: \"kubernetes.io/projected/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-kube-api-access-vs6pl\") pod \"nova-api-db-create-bq9r8\" (UID: \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\") " pod="openstack/nova-api-db-create-bq9r8" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.536739 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-6l8fm"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.538220 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6l8fm" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.550194 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzdvf\" (UniqueName: \"kubernetes.io/projected/35aeff68-6195-4a18-bcc6-d744b43632bf-kube-api-access-rzdvf\") pod \"nova-cell0-db-create-b24t5\" (UID: \"35aeff68-6195-4a18-bcc6-d744b43632bf\") " pod="openstack/nova-cell0-db-create-b24t5" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.550312 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35aeff68-6195-4a18-bcc6-d744b43632bf-operator-scripts\") pod \"nova-cell0-db-create-b24t5\" (UID: \"35aeff68-6195-4a18-bcc6-d744b43632bf\") " pod="openstack/nova-cell0-db-create-b24t5" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.550354 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s86gj\" (UniqueName: \"kubernetes.io/projected/75fbb5c7-8300-4ee3-9c24-6c05220babbb-kube-api-access-s86gj\") pod \"nova-api-8768-account-create-update-8svtq\" (UID: \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\") " pod="openstack/nova-api-8768-account-create-update-8svtq" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.550379 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75fbb5c7-8300-4ee3-9c24-6c05220babbb-operator-scripts\") pod \"nova-api-8768-account-create-update-8svtq\" (UID: \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\") " pod="openstack/nova-api-8768-account-create-update-8svtq" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.553550 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35aeff68-6195-4a18-bcc6-d744b43632bf-operator-scripts\") pod \"nova-cell0-db-create-b24t5\" (UID: \"35aeff68-6195-4a18-bcc6-d744b43632bf\") " pod="openstack/nova-cell0-db-create-b24t5" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.563941 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-bq9r8" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.569216 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzdvf\" (UniqueName: \"kubernetes.io/projected/35aeff68-6195-4a18-bcc6-d744b43632bf-kube-api-access-rzdvf\") pod \"nova-cell0-db-create-b24t5\" (UID: \"35aeff68-6195-4a18-bcc6-d744b43632bf\") " pod="openstack/nova-cell0-db-create-b24t5" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.570206 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-6l8fm"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.651672 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s86gj\" (UniqueName: \"kubernetes.io/projected/75fbb5c7-8300-4ee3-9c24-6c05220babbb-kube-api-access-s86gj\") pod \"nova-api-8768-account-create-update-8svtq\" (UID: \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\") " pod="openstack/nova-api-8768-account-create-update-8svtq" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.651731 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75fbb5c7-8300-4ee3-9c24-6c05220babbb-operator-scripts\") pod \"nova-api-8768-account-create-update-8svtq\" (UID: \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\") " pod="openstack/nova-api-8768-account-create-update-8svtq" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.651826 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwv9s\" (UniqueName: \"kubernetes.io/projected/48fc994f-42ab-43fa-9f54-8b36b4c1379d-kube-api-access-jwv9s\") pod \"nova-cell1-db-create-6l8fm\" (UID: \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\") " pod="openstack/nova-cell1-db-create-6l8fm" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.651913 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48fc994f-42ab-43fa-9f54-8b36b4c1379d-operator-scripts\") pod \"nova-cell1-db-create-6l8fm\" (UID: \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\") " pod="openstack/nova-cell1-db-create-6l8fm" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.652586 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75fbb5c7-8300-4ee3-9c24-6c05220babbb-operator-scripts\") pod \"nova-api-8768-account-create-update-8svtq\" (UID: \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\") " pod="openstack/nova-api-8768-account-create-update-8svtq" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.660041 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-f499-account-create-update-jx4jj"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.661260 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f499-account-create-update-jx4jj" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.666047 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.673877 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-b24t5" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.691331 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f499-account-create-update-jx4jj"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.700887 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s86gj\" (UniqueName: \"kubernetes.io/projected/75fbb5c7-8300-4ee3-9c24-6c05220babbb-kube-api-access-s86gj\") pod \"nova-api-8768-account-create-update-8svtq\" (UID: \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\") " pod="openstack/nova-api-8768-account-create-update-8svtq" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.754104 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8948faa6-4a90-4c87-b1ce-43fadf3f4548-operator-scripts\") pod \"nova-cell0-f499-account-create-update-jx4jj\" (UID: \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\") " pod="openstack/nova-cell0-f499-account-create-update-jx4jj" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.754518 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwv9s\" (UniqueName: \"kubernetes.io/projected/48fc994f-42ab-43fa-9f54-8b36b4c1379d-kube-api-access-jwv9s\") pod \"nova-cell1-db-create-6l8fm\" (UID: \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\") " pod="openstack/nova-cell1-db-create-6l8fm" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.754586 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8wwd\" (UniqueName: \"kubernetes.io/projected/8948faa6-4a90-4c87-b1ce-43fadf3f4548-kube-api-access-v8wwd\") pod \"nova-cell0-f499-account-create-update-jx4jj\" (UID: \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\") " pod="openstack/nova-cell0-f499-account-create-update-jx4jj" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.754656 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48fc994f-42ab-43fa-9f54-8b36b4c1379d-operator-scripts\") pod \"nova-cell1-db-create-6l8fm\" (UID: \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\") " pod="openstack/nova-cell1-db-create-6l8fm" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.756105 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48fc994f-42ab-43fa-9f54-8b36b4c1379d-operator-scripts\") pod \"nova-cell1-db-create-6l8fm\" (UID: \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\") " pod="openstack/nova-cell1-db-create-6l8fm" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.790819 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwv9s\" (UniqueName: \"kubernetes.io/projected/48fc994f-42ab-43fa-9f54-8b36b4c1379d-kube-api-access-jwv9s\") pod \"nova-cell1-db-create-6l8fm\" (UID: \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\") " pod="openstack/nova-cell1-db-create-6l8fm" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.792512 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-8768-account-create-update-8svtq" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.856516 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8948faa6-4a90-4c87-b1ce-43fadf3f4548-operator-scripts\") pod \"nova-cell0-f499-account-create-update-jx4jj\" (UID: \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\") " pod="openstack/nova-cell0-f499-account-create-update-jx4jj" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.856602 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8wwd\" (UniqueName: \"kubernetes.io/projected/8948faa6-4a90-4c87-b1ce-43fadf3f4548-kube-api-access-v8wwd\") pod \"nova-cell0-f499-account-create-update-jx4jj\" (UID: \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\") " pod="openstack/nova-cell0-f499-account-create-update-jx4jj" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.857131 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-8549-account-create-update-gs47k"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.857674 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8948faa6-4a90-4c87-b1ce-43fadf3f4548-operator-scripts\") pod \"nova-cell0-f499-account-create-update-jx4jj\" (UID: \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\") " pod="openstack/nova-cell0-f499-account-create-update-jx4jj" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.859147 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8549-account-create-update-gs47k" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.864583 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8549-account-create-update-gs47k"] Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.865285 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.876899 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-6l8fm" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.885666 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8wwd\" (UniqueName: \"kubernetes.io/projected/8948faa6-4a90-4c87-b1ce-43fadf3f4548-kube-api-access-v8wwd\") pod \"nova-cell0-f499-account-create-update-jx4jj\" (UID: \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\") " pod="openstack/nova-cell0-f499-account-create-update-jx4jj" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.958668 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ktpx\" (UniqueName: \"kubernetes.io/projected/af106b04-f0eb-4a65-bfb9-fe618fab8363-kube-api-access-5ktpx\") pod \"nova-cell1-8549-account-create-update-gs47k\" (UID: \"af106b04-f0eb-4a65-bfb9-fe618fab8363\") " pod="openstack/nova-cell1-8549-account-create-update-gs47k" Dec 05 11:09:43 crc kubenswrapper[5014]: I1205 11:09:43.958746 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af106b04-f0eb-4a65-bfb9-fe618fab8363-operator-scripts\") pod \"nova-cell1-8549-account-create-update-gs47k\" (UID: \"af106b04-f0eb-4a65-bfb9-fe618fab8363\") " pod="openstack/nova-cell1-8549-account-create-update-gs47k" Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.060435 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ktpx\" (UniqueName: \"kubernetes.io/projected/af106b04-f0eb-4a65-bfb9-fe618fab8363-kube-api-access-5ktpx\") pod \"nova-cell1-8549-account-create-update-gs47k\" (UID: \"af106b04-f0eb-4a65-bfb9-fe618fab8363\") " pod="openstack/nova-cell1-8549-account-create-update-gs47k" Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.060503 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af106b04-f0eb-4a65-bfb9-fe618fab8363-operator-scripts\") pod \"nova-cell1-8549-account-create-update-gs47k\" (UID: \"af106b04-f0eb-4a65-bfb9-fe618fab8363\") " pod="openstack/nova-cell1-8549-account-create-update-gs47k" Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.061343 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af106b04-f0eb-4a65-bfb9-fe618fab8363-operator-scripts\") pod \"nova-cell1-8549-account-create-update-gs47k\" (UID: \"af106b04-f0eb-4a65-bfb9-fe618fab8363\") " pod="openstack/nova-cell1-8549-account-create-update-gs47k" Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.088121 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ktpx\" (UniqueName: \"kubernetes.io/projected/af106b04-f0eb-4a65-bfb9-fe618fab8363-kube-api-access-5ktpx\") pod \"nova-cell1-8549-account-create-update-gs47k\" (UID: \"af106b04-f0eb-4a65-bfb9-fe618fab8363\") " pod="openstack/nova-cell1-8549-account-create-update-gs47k" Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.115795 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f499-account-create-update-jx4jj" Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.191261 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8549-account-create-update-gs47k" Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.197950 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-bq9r8"] Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.416562 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-b24t5"] Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.451959 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8768-account-create-update-8svtq"] Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.571938 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-6l8fm"] Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.745589 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f499-account-create-update-jx4jj"] Dec 05 11:09:44 crc kubenswrapper[5014]: I1205 11:09:44.758778 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8549-account-create-update-gs47k"] Dec 05 11:09:44 crc kubenswrapper[5014]: W1205 11:09:44.775228 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8948faa6_4a90_4c87_b1ce_43fadf3f4548.slice/crio-8bd467da5f81c3f0fb02bcd76979bac1466cb91291bd76f6ed6b3b1c3f148cf7 WatchSource:0}: Error finding container 8bd467da5f81c3f0fb02bcd76979bac1466cb91291bd76f6ed6b3b1c3f148cf7: Status 404 returned error can't find the container with id 8bd467da5f81c3f0fb02bcd76979bac1466cb91291bd76f6ed6b3b1c3f148cf7 Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.108187 5014 generic.go:334] "Generic (PLEG): container finished" podID="ca2c99e9-b93d-4c6b-95b5-f0457af5d14e" containerID="3f23bcbab17fb896e9af8a057d99c75ba344b32b7ba6b4301c39a010c0a811cd" exitCode=0 Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.108252 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bq9r8" event={"ID":"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e","Type":"ContainerDied","Data":"3f23bcbab17fb896e9af8a057d99c75ba344b32b7ba6b4301c39a010c0a811cd"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.108302 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bq9r8" event={"ID":"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e","Type":"ContainerStarted","Data":"27f86d9577841581b4993ef81acf89f5533151f5aafac79baebad7fa0459380a"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.114422 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-b24t5" event={"ID":"35aeff68-6195-4a18-bcc6-d744b43632bf","Type":"ContainerStarted","Data":"7561be037724c144ef110d0196efc381fbf90df2e2dab2df37d625b23f71b9cf"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.114472 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-b24t5" event={"ID":"35aeff68-6195-4a18-bcc6-d744b43632bf","Type":"ContainerStarted","Data":"de182e2a30e1798b37dc354c40f9f17b11c9d297979fb2affb45e6959b88b18a"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.118771 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8768-account-create-update-8svtq" event={"ID":"75fbb5c7-8300-4ee3-9c24-6c05220babbb","Type":"ContainerStarted","Data":"ba4b3ed9a33f89af8eea5534aba5be3d466e6373c202cef8020761e3b1b23abc"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.119372 5014 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8768-account-create-update-8svtq" event={"ID":"75fbb5c7-8300-4ee3-9c24-6c05220babbb","Type":"ContainerStarted","Data":"03615e1594c5410160d79cce4758306037b237702ced2c910285bdac0deee521"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.125403 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f499-account-create-update-jx4jj" event={"ID":"8948faa6-4a90-4c87-b1ce-43fadf3f4548","Type":"ContainerStarted","Data":"8bd467da5f81c3f0fb02bcd76979bac1466cb91291bd76f6ed6b3b1c3f148cf7"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.125659 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6l8fm" event={"ID":"48fc994f-42ab-43fa-9f54-8b36b4c1379d","Type":"ContainerStarted","Data":"57210b7bc39877dd3f757f1288fd0bb1008f4dc568162e18f3a3f2acab49fd43"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.125686 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6l8fm" event={"ID":"48fc994f-42ab-43fa-9f54-8b36b4c1379d","Type":"ContainerStarted","Data":"a3c75d5af7818116017737c65dd7549312e51643717550c2cbcf2fe8efde7cbc"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.134669 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8549-account-create-update-gs47k" event={"ID":"af106b04-f0eb-4a65-bfb9-fe618fab8363","Type":"ContainerStarted","Data":"2e05018b689d5a378a5a56b201724fa544202b0d8102c691a0f32ee9c4cdc446"} Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.148288 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-8768-account-create-update-8svtq" podStartSLOduration=2.148254647 podStartE2EDuration="2.148254647s" podCreationTimestamp="2025-12-05 11:09:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:45.142709064 +0000 UTC m=+1312.090826768" watchObservedRunningTime="2025-12-05 11:09:45.148254647 +0000 UTC m=+1312.096372341" Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.173172 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-b24t5" podStartSLOduration=2.173155736 podStartE2EDuration="2.173155736s" podCreationTimestamp="2025-12-05 11:09:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:45.161634769 +0000 UTC m=+1312.109752483" watchObservedRunningTime="2025-12-05 11:09:45.173155736 +0000 UTC m=+1312.121273440" Dec 05 11:09:45 crc kubenswrapper[5014]: I1205 11:09:45.187822 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-6l8fm" podStartSLOduration=2.187795159 podStartE2EDuration="2.187795159s" podCreationTimestamp="2025-12-05 11:09:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:45.180057143 +0000 UTC m=+1312.128174837" watchObservedRunningTime="2025-12-05 11:09:45.187795159 +0000 UTC m=+1312.135912853" Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.144990 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8549-account-create-update-gs47k" 
event={"ID":"af106b04-f0eb-4a65-bfb9-fe618fab8363","Type":"ContainerStarted","Data":"05ef908d99f937c96f32bfb4f1d831ad80435aec58da00ed951321d75d71102e"} Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.148110 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f499-account-create-update-jx4jj" event={"ID":"8948faa6-4a90-4c87-b1ce-43fadf3f4548","Type":"ContainerStarted","Data":"040b2418a90c7f480a0d95309d8a09a53ef2edc99d91873141b8f6a942a9747a"} Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.169028 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-8549-account-create-update-gs47k" podStartSLOduration=3.169003115 podStartE2EDuration="3.169003115s" podCreationTimestamp="2025-12-05 11:09:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:46.16215235 +0000 UTC m=+1313.110270074" watchObservedRunningTime="2025-12-05 11:09:46.169003115 +0000 UTC m=+1313.117120819" Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.185343 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-f499-account-create-update-jx4jj" podStartSLOduration=3.185322007 podStartE2EDuration="3.185322007s" podCreationTimestamp="2025-12-05 11:09:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:46.177592791 +0000 UTC m=+1313.125710495" watchObservedRunningTime="2025-12-05 11:09:46.185322007 +0000 UTC m=+1313.133439711" Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.566509 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bq9r8" Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.613972 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-operator-scripts\") pod \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\" (UID: \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\") " Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.614077 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vs6pl\" (UniqueName: \"kubernetes.io/projected/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-kube-api-access-vs6pl\") pod \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\" (UID: \"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e\") " Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.614886 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ca2c99e9-b93d-4c6b-95b5-f0457af5d14e" (UID: "ca2c99e9-b93d-4c6b-95b5-f0457af5d14e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.619449 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-kube-api-access-vs6pl" (OuterVolumeSpecName: "kube-api-access-vs6pl") pod "ca2c99e9-b93d-4c6b-95b5-f0457af5d14e" (UID: "ca2c99e9-b93d-4c6b-95b5-f0457af5d14e"). InnerVolumeSpecName "kube-api-access-vs6pl". 
PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.715583 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.715632 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vs6pl\" (UniqueName: \"kubernetes.io/projected/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e-kube-api-access-vs6pl\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.769048 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.769361 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerName="glance-log" containerID="cri-o://460ea910d7875fd4c009eabf1d4fbd9c9a8cd150dd3200f2a269f58655537bab" gracePeriod=30
Dec 05 11:09:46 crc kubenswrapper[5014]: I1205 11:09:46.769784 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerName="glance-httpd" containerID="cri-o://fcd92f4a8b32ab8955ca1a0b376a1e41627729516993fbfeff7770a5ca35d1f8" gracePeriod=30
Dec 05 11:09:46 crc kubenswrapper[5014]: E1205 11:09:46.952671 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48fc994f_42ab_43fa_9f54_8b36b4c1379d.slice/crio-57210b7bc39877dd3f757f1288fd0bb1008f4dc568162e18f3a3f2acab49fd43.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48fc994f_42ab_43fa_9f54_8b36b4c1379d.slice/crio-conmon-57210b7bc39877dd3f757f1288fd0bb1008f4dc568162e18f3a3f2acab49fd43.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75fbb5c7_8300_4ee3_9c24_6c05220babbb.slice/crio-ba4b3ed9a33f89af8eea5534aba5be3d466e6373c202cef8020761e3b1b23abc.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.158083 5014 generic.go:334] "Generic (PLEG): container finished" podID="af106b04-f0eb-4a65-bfb9-fe618fab8363" containerID="05ef908d99f937c96f32bfb4f1d831ad80435aec58da00ed951321d75d71102e" exitCode=0
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.158443 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8549-account-create-update-gs47k" event={"ID":"af106b04-f0eb-4a65-bfb9-fe618fab8363","Type":"ContainerDied","Data":"05ef908d99f937c96f32bfb4f1d831ad80435aec58da00ed951321d75d71102e"}
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.160422 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bq9r8" event={"ID":"ca2c99e9-b93d-4c6b-95b5-f0457af5d14e","Type":"ContainerDied","Data":"27f86d9577841581b4993ef81acf89f5533151f5aafac79baebad7fa0459380a"}
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.160455 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27f86d9577841581b4993ef81acf89f5533151f5aafac79baebad7fa0459380a"
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.160504 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bq9r8"
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.170284 5014 generic.go:334] "Generic (PLEG): container finished" podID="35aeff68-6195-4a18-bcc6-d744b43632bf" containerID="7561be037724c144ef110d0196efc381fbf90df2e2dab2df37d625b23f71b9cf" exitCode=0
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.170312 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-b24t5" event={"ID":"35aeff68-6195-4a18-bcc6-d744b43632bf","Type":"ContainerDied","Data":"7561be037724c144ef110d0196efc381fbf90df2e2dab2df37d625b23f71b9cf"}
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.173127 5014 generic.go:334] "Generic (PLEG): container finished" podID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerID="460ea910d7875fd4c009eabf1d4fbd9c9a8cd150dd3200f2a269f58655537bab" exitCode=143
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.173213 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4","Type":"ContainerDied","Data":"460ea910d7875fd4c009eabf1d4fbd9c9a8cd150dd3200f2a269f58655537bab"}
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.175341 5014 generic.go:334] "Generic (PLEG): container finished" podID="75fbb5c7-8300-4ee3-9c24-6c05220babbb" containerID="ba4b3ed9a33f89af8eea5534aba5be3d466e6373c202cef8020761e3b1b23abc" exitCode=0
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.175414 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8768-account-create-update-8svtq" event={"ID":"75fbb5c7-8300-4ee3-9c24-6c05220babbb","Type":"ContainerDied","Data":"ba4b3ed9a33f89af8eea5534aba5be3d466e6373c202cef8020761e3b1b23abc"}
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.177495 5014 generic.go:334] "Generic (PLEG): container finished" podID="8948faa6-4a90-4c87-b1ce-43fadf3f4548" containerID="040b2418a90c7f480a0d95309d8a09a53ef2edc99d91873141b8f6a942a9747a" exitCode=0
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.177653 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f499-account-create-update-jx4jj" event={"ID":"8948faa6-4a90-4c87-b1ce-43fadf3f4548","Type":"ContainerDied","Data":"040b2418a90c7f480a0d95309d8a09a53ef2edc99d91873141b8f6a942a9747a"}
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.180745 5014 generic.go:334] "Generic (PLEG): container finished" podID="48fc994f-42ab-43fa-9f54-8b36b4c1379d" containerID="57210b7bc39877dd3f757f1288fd0bb1008f4dc568162e18f3a3f2acab49fd43" exitCode=0
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.180800 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6l8fm" event={"ID":"48fc994f-42ab-43fa-9f54-8b36b4c1379d","Type":"ContainerDied","Data":"57210b7bc39877dd3f757f1288fd0bb1008f4dc568162e18f3a3f2acab49fd43"}
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.734150 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.734781 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerName="glance-log" containerID="cri-o://99b6d98c73190b35e4f1923ab1019f752510f57bd4b744606254ee60f1969619" gracePeriod=30
Dec 05 11:09:47 crc kubenswrapper[5014]: I1205 11:09:47.734856 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerName="glance-httpd" containerID="cri-o://c5a5f72ca8e40c159108bdc35c106f726ac999ab72cc675407f910dc256e5823" gracePeriod=30
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.190978 5014 generic.go:334] "Generic (PLEG): container finished" podID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerID="99b6d98c73190b35e4f1923ab1019f752510f57bd4b744606254ee60f1969619" exitCode=143
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.191069 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ee1ae65-148c-40e6-afc5-8526bade7971","Type":"ContainerDied","Data":"99b6d98c73190b35e4f1923ab1019f752510f57bd4b744606254ee60f1969619"}
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.630544 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6l8fm"
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.753321 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwv9s\" (UniqueName: \"kubernetes.io/projected/48fc994f-42ab-43fa-9f54-8b36b4c1379d-kube-api-access-jwv9s\") pod \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\" (UID: \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\") "
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.753452 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48fc994f-42ab-43fa-9f54-8b36b4c1379d-operator-scripts\") pod \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\" (UID: \"48fc994f-42ab-43fa-9f54-8b36b4c1379d\") "
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.754732 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48fc994f-42ab-43fa-9f54-8b36b4c1379d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "48fc994f-42ab-43fa-9f54-8b36b4c1379d" (UID: "48fc994f-42ab-43fa-9f54-8b36b4c1379d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.769571 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48fc994f-42ab-43fa-9f54-8b36b4c1379d-kube-api-access-jwv9s" (OuterVolumeSpecName: "kube-api-access-jwv9s") pod "48fc994f-42ab-43fa-9f54-8b36b4c1379d" (UID: "48fc994f-42ab-43fa-9f54-8b36b4c1379d"). InnerVolumeSpecName "kube-api-access-jwv9s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.855566 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwv9s\" (UniqueName: \"kubernetes.io/projected/48fc994f-42ab-43fa-9f54-8b36b4c1379d-kube-api-access-jwv9s\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.855772 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48fc994f-42ab-43fa-9f54-8b36b4c1379d-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.927699 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f499-account-create-update-jx4jj"
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.937773 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8768-account-create-update-8svtq"
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.955085 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8549-account-create-update-gs47k"
Dec 05 11:09:48 crc kubenswrapper[5014]: I1205 11:09:48.981599 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-b24t5"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.067992 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8wwd\" (UniqueName: \"kubernetes.io/projected/8948faa6-4a90-4c87-b1ce-43fadf3f4548-kube-api-access-v8wwd\") pod \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\" (UID: \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\") "
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.068089 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ktpx\" (UniqueName: \"kubernetes.io/projected/af106b04-f0eb-4a65-bfb9-fe618fab8363-kube-api-access-5ktpx\") pod \"af106b04-f0eb-4a65-bfb9-fe618fab8363\" (UID: \"af106b04-f0eb-4a65-bfb9-fe618fab8363\") "
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.068126 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af106b04-f0eb-4a65-bfb9-fe618fab8363-operator-scripts\") pod \"af106b04-f0eb-4a65-bfb9-fe618fab8363\" (UID: \"af106b04-f0eb-4a65-bfb9-fe618fab8363\") "
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.068205 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s86gj\" (UniqueName: \"kubernetes.io/projected/75fbb5c7-8300-4ee3-9c24-6c05220babbb-kube-api-access-s86gj\") pod \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\" (UID: \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\") "
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.068286 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8948faa6-4a90-4c87-b1ce-43fadf3f4548-operator-scripts\") pod \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\" (UID: \"8948faa6-4a90-4c87-b1ce-43fadf3f4548\") "
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.068396 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75fbb5c7-8300-4ee3-9c24-6c05220babbb-operator-scripts\") pod \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\" (UID: \"75fbb5c7-8300-4ee3-9c24-6c05220babbb\") "
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.069355 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af106b04-f0eb-4a65-bfb9-fe618fab8363-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "af106b04-f0eb-4a65-bfb9-fe618fab8363" (UID: "af106b04-f0eb-4a65-bfb9-fe618fab8363"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.069378 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75fbb5c7-8300-4ee3-9c24-6c05220babbb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "75fbb5c7-8300-4ee3-9c24-6c05220babbb" (UID: "75fbb5c7-8300-4ee3-9c24-6c05220babbb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.069407 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8948faa6-4a90-4c87-b1ce-43fadf3f4548-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8948faa6-4a90-4c87-b1ce-43fadf3f4548" (UID: "8948faa6-4a90-4c87-b1ce-43fadf3f4548"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.069696 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75fbb5c7-8300-4ee3-9c24-6c05220babbb-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.069716 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af106b04-f0eb-4a65-bfb9-fe618fab8363-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.069728 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8948faa6-4a90-4c87-b1ce-43fadf3f4548-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.072517 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af106b04-f0eb-4a65-bfb9-fe618fab8363-kube-api-access-5ktpx" (OuterVolumeSpecName: "kube-api-access-5ktpx") pod "af106b04-f0eb-4a65-bfb9-fe618fab8363" (UID: "af106b04-f0eb-4a65-bfb9-fe618fab8363"). InnerVolumeSpecName "kube-api-access-5ktpx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.072845 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8948faa6-4a90-4c87-b1ce-43fadf3f4548-kube-api-access-v8wwd" (OuterVolumeSpecName: "kube-api-access-v8wwd") pod "8948faa6-4a90-4c87-b1ce-43fadf3f4548" (UID: "8948faa6-4a90-4c87-b1ce-43fadf3f4548"). InnerVolumeSpecName "kube-api-access-v8wwd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.084505 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75fbb5c7-8300-4ee3-9c24-6c05220babbb-kube-api-access-s86gj" (OuterVolumeSpecName: "kube-api-access-s86gj") pod "75fbb5c7-8300-4ee3-9c24-6c05220babbb" (UID: "75fbb5c7-8300-4ee3-9c24-6c05220babbb"). InnerVolumeSpecName "kube-api-access-s86gj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.171241 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzdvf\" (UniqueName: \"kubernetes.io/projected/35aeff68-6195-4a18-bcc6-d744b43632bf-kube-api-access-rzdvf\") pod \"35aeff68-6195-4a18-bcc6-d744b43632bf\" (UID: \"35aeff68-6195-4a18-bcc6-d744b43632bf\") "
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.171406 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35aeff68-6195-4a18-bcc6-d744b43632bf-operator-scripts\") pod \"35aeff68-6195-4a18-bcc6-d744b43632bf\" (UID: \"35aeff68-6195-4a18-bcc6-d744b43632bf\") "
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.171794 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35aeff68-6195-4a18-bcc6-d744b43632bf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "35aeff68-6195-4a18-bcc6-d744b43632bf" (UID: "35aeff68-6195-4a18-bcc6-d744b43632bf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.172355 5014 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35aeff68-6195-4a18-bcc6-d744b43632bf-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.172383 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8wwd\" (UniqueName: \"kubernetes.io/projected/8948faa6-4a90-4c87-b1ce-43fadf3f4548-kube-api-access-v8wwd\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.172396 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ktpx\" (UniqueName: \"kubernetes.io/projected/af106b04-f0eb-4a65-bfb9-fe618fab8363-kube-api-access-5ktpx\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.172410 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s86gj\" (UniqueName: \"kubernetes.io/projected/75fbb5c7-8300-4ee3-9c24-6c05220babbb-kube-api-access-s86gj\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.181576 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35aeff68-6195-4a18-bcc6-d744b43632bf-kube-api-access-rzdvf" (OuterVolumeSpecName: "kube-api-access-rzdvf") pod "35aeff68-6195-4a18-bcc6-d744b43632bf" (UID: "35aeff68-6195-4a18-bcc6-d744b43632bf"). InnerVolumeSpecName "kube-api-access-rzdvf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.206816 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8768-account-create-update-8svtq"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.207446 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8768-account-create-update-8svtq" event={"ID":"75fbb5c7-8300-4ee3-9c24-6c05220babbb","Type":"ContainerDied","Data":"03615e1594c5410160d79cce4758306037b237702ced2c910285bdac0deee521"}
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.207496 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03615e1594c5410160d79cce4758306037b237702ced2c910285bdac0deee521"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.209191 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f499-account-create-update-jx4jj" event={"ID":"8948faa6-4a90-4c87-b1ce-43fadf3f4548","Type":"ContainerDied","Data":"8bd467da5f81c3f0fb02bcd76979bac1466cb91291bd76f6ed6b3b1c3f148cf7"}
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.209243 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8bd467da5f81c3f0fb02bcd76979bac1466cb91291bd76f6ed6b3b1c3f148cf7"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.209324 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f499-account-create-update-jx4jj"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.211684 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6l8fm"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.211679 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6l8fm" event={"ID":"48fc994f-42ab-43fa-9f54-8b36b4c1379d","Type":"ContainerDied","Data":"a3c75d5af7818116017737c65dd7549312e51643717550c2cbcf2fe8efde7cbc"}
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.211842 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3c75d5af7818116017737c65dd7549312e51643717550c2cbcf2fe8efde7cbc"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.214995 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8549-account-create-update-gs47k" event={"ID":"af106b04-f0eb-4a65-bfb9-fe618fab8363","Type":"ContainerDied","Data":"2e05018b689d5a378a5a56b201724fa544202b0d8102c691a0f32ee9c4cdc446"}
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.215028 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e05018b689d5a378a5a56b201724fa544202b0d8102c691a0f32ee9c4cdc446"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.215108 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8549-account-create-update-gs47k"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.224035 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-b24t5" event={"ID":"35aeff68-6195-4a18-bcc6-d744b43632bf","Type":"ContainerDied","Data":"de182e2a30e1798b37dc354c40f9f17b11c9d297979fb2affb45e6959b88b18a"}
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.224111 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de182e2a30e1798b37dc354c40f9f17b11c9d297979fb2affb45e6959b88b18a"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.224216 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-b24t5"
Dec 05 11:09:49 crc kubenswrapper[5014]: I1205 11:09:49.274488 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzdvf\" (UniqueName: \"kubernetes.io/projected/35aeff68-6195-4a18-bcc6-d744b43632bf-kube-api-access-rzdvf\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.247000 5014 generic.go:334] "Generic (PLEG): container finished" podID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerID="fcd92f4a8b32ab8955ca1a0b376a1e41627729516993fbfeff7770a5ca35d1f8" exitCode=0
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.247111 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4","Type":"ContainerDied","Data":"fcd92f4a8b32ab8955ca1a0b376a1e41627729516993fbfeff7770a5ca35d1f8"}
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.505554 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.603368 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-scripts\") pod \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") "
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.603525 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-logs\") pod \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") "
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.603593 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-combined-ca-bundle\") pod \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") "
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.603657 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-config-data\") pod \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") "
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.603703 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-public-tls-certs\") pod \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") "
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.603726 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") "
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.603758 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-httpd-run\") pod \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") "
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.603832 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwqxx\" (UniqueName: \"kubernetes.io/projected/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-kube-api-access-jwqxx\") pod \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\" (UID: \"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4\") "
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.604205 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-logs" (OuterVolumeSpecName: "logs") pod "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" (UID: "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.605655 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" (UID: "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.609347 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-kube-api-access-jwqxx" (OuterVolumeSpecName: "kube-api-access-jwqxx") pod "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" (UID: "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4"). InnerVolumeSpecName "kube-api-access-jwqxx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.609655 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-scripts" (OuterVolumeSpecName: "scripts") pod "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" (UID: "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.613529 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" (UID: "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.643697 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" (UID: "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.654490 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" (UID: "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.678259 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-config-data" (OuterVolumeSpecName: "config-data") pod "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" (UID: "7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.705790 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.705818 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-logs\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.705828 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.705840 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.705847 5014 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.705882 5014 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.705893 5014 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.705903 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwqxx\" (UniqueName: \"kubernetes.io/projected/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4-kube-api-access-jwqxx\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.725511 5014 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Dec 05 11:09:50 crc kubenswrapper[5014]: I1205 11:09:50.807401 5014 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.268527 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4","Type":"ContainerDied","Data":"2add85ce5d9f676f9989d25d667eff660eb4e53dc7d7f80a9ef0be3a0d40f6aa"}
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.268587 5014 scope.go:117] "RemoveContainer" containerID="fcd92f4a8b32ab8955ca1a0b376a1e41627729516993fbfeff7770a5ca35d1f8"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.268750 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.279106 5014 generic.go:334] "Generic (PLEG): container finished" podID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerID="c5a5f72ca8e40c159108bdc35c106f726ac999ab72cc675407f910dc256e5823" exitCode=0
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.279240 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ee1ae65-148c-40e6-afc5-8526bade7971","Type":"ContainerDied","Data":"c5a5f72ca8e40c159108bdc35c106f726ac999ab72cc675407f910dc256e5823"}
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.328892 5014 scope.go:117] "RemoveContainer" containerID="460ea910d7875fd4c009eabf1d4fbd9c9a8cd150dd3200f2a269f58655537bab"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.362349 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.376718 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.398662 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:09:51 crc kubenswrapper[5014]: E1205 11:09:51.399107 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8948faa6-4a90-4c87-b1ce-43fadf3f4548" containerName="mariadb-account-create-update"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399131 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="8948faa6-4a90-4c87-b1ce-43fadf3f4548" containerName="mariadb-account-create-update"
Dec 05 11:09:51 crc kubenswrapper[5014]: E1205 11:09:51.399141 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48fc994f-42ab-43fa-9f54-8b36b4c1379d" containerName="mariadb-database-create"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399150 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="48fc994f-42ab-43fa-9f54-8b36b4c1379d" containerName="mariadb-database-create"
Dec 05 11:09:51 crc kubenswrapper[5014]: E1205 11:09:51.399167 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35aeff68-6195-4a18-bcc6-d744b43632bf" containerName="mariadb-database-create"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399175 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="35aeff68-6195-4a18-bcc6-d744b43632bf" containerName="mariadb-database-create"
Dec 05 11:09:51 crc kubenswrapper[5014]: E1205 11:09:51.399190 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerName="glance-httpd"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399197 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerName="glance-httpd"
Dec 05 11:09:51 crc kubenswrapper[5014]: E1205 11:09:51.399225 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerName="glance-log"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399232 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerName="glance-log"
Dec 05 11:09:51 crc kubenswrapper[5014]: E1205 11:09:51.399250 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca2c99e9-b93d-4c6b-95b5-f0457af5d14e" containerName="mariadb-database-create"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399257 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca2c99e9-b93d-4c6b-95b5-f0457af5d14e" containerName="mariadb-database-create"
Dec 05 11:09:51 crc kubenswrapper[5014]: E1205 11:09:51.399301 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75fbb5c7-8300-4ee3-9c24-6c05220babbb" containerName="mariadb-account-create-update"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399310 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="75fbb5c7-8300-4ee3-9c24-6c05220babbb" containerName="mariadb-account-create-update"
Dec 05 11:09:51 crc kubenswrapper[5014]: E1205 11:09:51.399325 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af106b04-f0eb-4a65-bfb9-fe618fab8363" containerName="mariadb-account-create-update"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399333 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="af106b04-f0eb-4a65-bfb9-fe618fab8363" containerName="mariadb-account-create-update"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399518 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerName="glance-httpd"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399530 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca2c99e9-b93d-4c6b-95b5-f0457af5d14e" containerName="mariadb-database-create"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399537 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="8948faa6-4a90-4c87-b1ce-43fadf3f4548" containerName="mariadb-account-create-update"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399552 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="af106b04-f0eb-4a65-bfb9-fe618fab8363" containerName="mariadb-account-create-update"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399567 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="75fbb5c7-8300-4ee3-9c24-6c05220babbb" containerName="mariadb-account-create-update"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399575 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" containerName="glance-log"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399584 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="48fc994f-42ab-43fa-9f54-8b36b4c1379d" containerName="mariadb-database-create"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.399598 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="35aeff68-6195-4a18-bcc6-d744b43632bf" containerName="mariadb-database-create"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.400635 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.405767 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.407684 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.425746 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.463088 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.533533 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.533602 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.533644 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-scripts\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.533706 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.533728 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c59756a0-84f2-4678-9294-aaa2475d08ec-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.533783 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txhmp\" (UniqueName: \"kubernetes.io/projected/c59756a0-84f2-4678-9294-aaa2475d08ec-kube-api-access-txhmp\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.533820 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c59756a0-84f2-4678-9294-aaa2475d08ec-logs\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.533848 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-config-data\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.635452 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-logs\") pod \"0ee1ae65-148c-40e6-afc5-8526bade7971\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.635535 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssmph\" (UniqueName: \"kubernetes.io/projected/0ee1ae65-148c-40e6-afc5-8526bade7971-kube-api-access-ssmph\") pod \"0ee1ae65-148c-40e6-afc5-8526bade7971\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.635615 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-combined-ca-bundle\") pod \"0ee1ae65-148c-40e6-afc5-8526bade7971\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.635647 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-config-data\") pod \"0ee1ae65-148c-40e6-afc5-8526bade7971\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.635709 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-internal-tls-certs\") pod \"0ee1ae65-148c-40e6-afc5-8526bade7971\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.635739 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-scripts\") pod \"0ee1ae65-148c-40e6-afc5-8526bade7971\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.635866 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"0ee1ae65-148c-40e6-afc5-8526bade7971\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.635912 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-httpd-run\") pod \"0ee1ae65-148c-40e6-afc5-8526bade7971\" (UID: \"0ee1ae65-148c-40e6-afc5-8526bade7971\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636207 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-logs" (OuterVolumeSpecName: "logs") pod "0ee1ae65-148c-40e6-afc5-8526bade7971" (UID: "0ee1ae65-148c-40e6-afc5-8526bade7971"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636237 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636436 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-scripts\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636533 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636590 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c59756a0-84f2-4678-9294-aaa2475d08ec-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636659 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txhmp\" (UniqueName: \"kubernetes.io/projected/c59756a0-84f2-4678-9294-aaa2475d08ec-kube-api-access-txhmp\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636722 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c59756a0-84f2-4678-9294-aaa2475d08ec-logs\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636758 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0ee1ae65-148c-40e6-afc5-8526bade7971" (UID: "0ee1ae65-148c-40e6-afc5-8526bade7971"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636784 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-config-data\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636831 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636950 5014 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.636965 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ee1ae65-148c-40e6-afc5-8526bade7971-logs\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.638469 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c59756a0-84f2-4678-9294-aaa2475d08ec-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.638729 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c59756a0-84f2-4678-9294-aaa2475d08ec-logs\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.639057 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.644368 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ee1ae65-148c-40e6-afc5-8526bade7971-kube-api-access-ssmph" (OuterVolumeSpecName: "kube-api-access-ssmph") pod "0ee1ae65-148c-40e6-afc5-8526bade7971" (UID: "0ee1ae65-148c-40e6-afc5-8526bade7971"). InnerVolumeSpecName "kube-api-access-ssmph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.645648 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-scripts" (OuterVolumeSpecName: "scripts") pod "0ee1ae65-148c-40e6-afc5-8526bade7971" (UID: "0ee1ae65-148c-40e6-afc5-8526bade7971"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.646968 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.647718 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-scripts\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.655728 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-config-data\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.661895 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txhmp\" (UniqueName: \"kubernetes.io/projected/c59756a0-84f2-4678-9294-aaa2475d08ec-kube-api-access-txhmp\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.668517 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c59756a0-84f2-4678-9294-aaa2475d08ec-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.681992 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "0ee1ae65-148c-40e6-afc5-8526bade7971" (UID: "0ee1ae65-148c-40e6-afc5-8526bade7971"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.696173 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"c59756a0-84f2-4678-9294-aaa2475d08ec\") " pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.700626 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ee1ae65-148c-40e6-afc5-8526bade7971" (UID: "0ee1ae65-148c-40e6-afc5-8526bade7971"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.724417 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.739532 5014 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.739594 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssmph\" (UniqueName: \"kubernetes.io/projected/0ee1ae65-148c-40e6-afc5-8526bade7971-kube-api-access-ssmph\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.739612 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.739622 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.742434 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-config-data" (OuterVolumeSpecName: "config-data") pod "0ee1ae65-148c-40e6-afc5-8526bade7971" (UID: "0ee1ae65-148c-40e6-afc5-8526bade7971"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.755379 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0ee1ae65-148c-40e6-afc5-8526bade7971" (UID: "0ee1ae65-148c-40e6-afc5-8526bade7971"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.773172 5014 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.779591 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.840444 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-combined-ca-bundle\") pod \"e0ab7533-dc12-4a86-9191-fb6e11db2740\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.840852 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-scripts\") pod \"e0ab7533-dc12-4a86-9191-fb6e11db2740\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.840909 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9d87b\" (UniqueName: \"kubernetes.io/projected/e0ab7533-dc12-4a86-9191-fb6e11db2740-kube-api-access-9d87b\") pod \"e0ab7533-dc12-4a86-9191-fb6e11db2740\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.840943 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-run-httpd\") pod \"e0ab7533-dc12-4a86-9191-fb6e11db2740\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.841123 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-sg-core-conf-yaml\") pod \"e0ab7533-dc12-4a86-9191-fb6e11db2740\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.841170 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-log-httpd\") pod \"e0ab7533-dc12-4a86-9191-fb6e11db2740\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.841231 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-config-data\") pod \"e0ab7533-dc12-4a86-9191-fb6e11db2740\" (UID: \"e0ab7533-dc12-4a86-9191-fb6e11db2740\") "
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.842160 5014 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.842240 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.842415 5014 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ee1ae65-148c-40e6-afc5-8526bade7971-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.843318 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e0ab7533-dc12-4a86-9191-fb6e11db2740" (UID: "e0ab7533-dc12-4a86-9191-fb6e11db2740"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.845360 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-scripts" (OuterVolumeSpecName: "scripts") pod "e0ab7533-dc12-4a86-9191-fb6e11db2740" (UID: "e0ab7533-dc12-4a86-9191-fb6e11db2740"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.846995 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e0ab7533-dc12-4a86-9191-fb6e11db2740" (UID: "e0ab7533-dc12-4a86-9191-fb6e11db2740"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.850568 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0ab7533-dc12-4a86-9191-fb6e11db2740-kube-api-access-9d87b" (OuterVolumeSpecName: "kube-api-access-9d87b") pod "e0ab7533-dc12-4a86-9191-fb6e11db2740" (UID: "e0ab7533-dc12-4a86-9191-fb6e11db2740"). InnerVolumeSpecName "kube-api-access-9d87b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.874203 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e0ab7533-dc12-4a86-9191-fb6e11db2740" (UID: "e0ab7533-dc12-4a86-9191-fb6e11db2740"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.944445 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.944485 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9d87b\" (UniqueName: \"kubernetes.io/projected/e0ab7533-dc12-4a86-9191-fb6e11db2740-kube-api-access-9d87b\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.944497 5014 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.944507 5014 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.944516 5014 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0ab7533-dc12-4a86-9191-fb6e11db2740-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.951369 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0ab7533-dc12-4a86-9191-fb6e11db2740" (UID: "e0ab7533-dc12-4a86-9191-fb6e11db2740"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:51 crc kubenswrapper[5014]: I1205 11:09:51.990457 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-config-data" (OuterVolumeSpecName: "config-data") pod "e0ab7533-dc12-4a86-9191-fb6e11db2740" (UID: "e0ab7533-dc12-4a86-9191-fb6e11db2740"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.046223 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.046259 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0ab7533-dc12-4a86-9191-fb6e11db2740-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.291737 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0ee1ae65-148c-40e6-afc5-8526bade7971","Type":"ContainerDied","Data":"6f4cfd6805c0bd988a864e370b2ee66fd10a0d6d6ec4b6e7f1d0b54bf2afee27"}
Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.292083 5014 scope.go:117] "RemoveContainer" containerID="c5a5f72ca8e40c159108bdc35c106f726ac999ab72cc675407f910dc256e5823"
Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.291775 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.299259 5014 generic.go:334] "Generic (PLEG): container finished" podID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerID="734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae" exitCode=0 Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.299406 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerDied","Data":"734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae"} Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.299438 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0ab7533-dc12-4a86-9191-fb6e11db2740","Type":"ContainerDied","Data":"29c747d650d65892299313ea83c502cede8dd48f3259057ea979457bf3cd0c61"} Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.299526 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.346760 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.354060 5014 scope.go:117] "RemoveContainer" containerID="99b6d98c73190b35e4f1923ab1019f752510f57bd4b744606254ee60f1969619" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.364339 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.382335 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.392110 5014 scope.go:117] "RemoveContainer" containerID="780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.393541 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.402120 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.402735 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="proxy-httpd" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.402755 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="proxy-httpd" Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.402774 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerName="glance-log" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.402781 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerName="glance-log" Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.402800 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="ceilometer-central-agent" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.402808 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="ceilometer-central-agent" Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.402826 5014 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="sg-core" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.402843 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="sg-core" Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.402859 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="ceilometer-notification-agent" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.402867 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="ceilometer-notification-agent" Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.402878 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerName="glance-httpd" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.402885 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerName="glance-httpd" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.403037 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="ceilometer-notification-agent" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.403052 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerName="glance-httpd" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.403059 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="sg-core" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.403078 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="proxy-httpd" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.403088 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" containerName="ceilometer-central-agent" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.403099 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ee1ae65-148c-40e6-afc5-8526bade7971" containerName="glance-log" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.404110 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.409011 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.410388 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.410730 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.430515 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.430764 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.431630 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.435673 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.435934 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.452801 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.476752 5014 scope.go:117] "RemoveContainer" containerID="d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.501544 5014 scope.go:117] "RemoveContainer" containerID="4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557121 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557211 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-config-data\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557244 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557306 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557350 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw6ld\" (UniqueName: \"kubernetes.io/projected/927d96cb-db91-42ec-8963-4b1259c7b65f-kube-api-access-fw6ld\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557380 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwkth\" (UniqueName: \"kubernetes.io/projected/041081c2-6470-40f7-945d-43ac9a3d716f-kube-api-access-hwkth\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557416 5014 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557442 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557463 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-scripts\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557497 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-log-httpd\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557515 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/927d96cb-db91-42ec-8963-4b1259c7b65f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557541 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557630 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557683 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/927d96cb-db91-42ec-8963-4b1259c7b65f-logs\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.557736 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-run-httpd\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.558049 5014 scope.go:117] "RemoveContainer" containerID="734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae" Dec 05 
11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.582863 5014 scope.go:117] "RemoveContainer" containerID="780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14" Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.583315 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14\": container with ID starting with 780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14 not found: ID does not exist" containerID="780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.583339 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14"} err="failed to get container status \"780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14\": rpc error: code = NotFound desc = could not find container \"780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14\": container with ID starting with 780277aa519822b903b1e85ec7432ac5dd6316ee7967c6f7c42024f41b41bc14 not found: ID does not exist" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.583363 5014 scope.go:117] "RemoveContainer" containerID="d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8" Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.583736 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8\": container with ID starting with d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8 not found: ID does not exist" containerID="d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.583789 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8"} err="failed to get container status \"d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8\": rpc error: code = NotFound desc = could not find container \"d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8\": container with ID starting with d85a2cf0dfd1ffa40ad6deb5795efd41a56cb05f197658ea8e8f478dcc6f5ca8 not found: ID does not exist" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.583817 5014 scope.go:117] "RemoveContainer" containerID="4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a" Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.584156 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a\": container with ID starting with 4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a not found: ID does not exist" containerID="4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.584179 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a"} err="failed to get container status \"4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a\": rpc error: code = NotFound desc = could not find container 
\"4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a\": container with ID starting with 4381c55fbbe1fc310a450a4c11a010c5141c2a786c45a9d5d637f79a8a59186a not found: ID does not exist" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.584192 5014 scope.go:117] "RemoveContainer" containerID="734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae" Dec 05 11:09:52 crc kubenswrapper[5014]: E1205 11:09:52.584483 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae\": container with ID starting with 734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae not found: ID does not exist" containerID="734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.584525 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae"} err="failed to get container status \"734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae\": rpc error: code = NotFound desc = could not find container \"734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae\": container with ID starting with 734f9fd4d642958f636640a3981e20fd8232967ab26722d2b017107ceaa0a8ae not found: ID does not exist" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660133 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660232 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw6ld\" (UniqueName: \"kubernetes.io/projected/927d96cb-db91-42ec-8963-4b1259c7b65f-kube-api-access-fw6ld\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660369 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwkth\" (UniqueName: \"kubernetes.io/projected/041081c2-6470-40f7-945d-43ac9a3d716f-kube-api-access-hwkth\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660412 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660435 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660455 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-scripts\") pod 
\"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660484 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-log-httpd\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660500 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/927d96cb-db91-42ec-8963-4b1259c7b65f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660525 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660578 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660592 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/927d96cb-db91-42ec-8963-4b1259c7b65f-logs\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660609 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-run-httpd\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660636 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660698 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-config-data\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.660719 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.661142 5014 operation_generator.go:580] "MountVolume.MountDevice 
succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.661440 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-log-httpd\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.661534 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/927d96cb-db91-42ec-8963-4b1259c7b65f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.661695 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-run-httpd\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.661957 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/927d96cb-db91-42ec-8963-4b1259c7b65f-logs\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.666766 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.667681 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.667794 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.668028 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-config-data\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.668266 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-scripts\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.682200 5014 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.682395 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.683048 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/927d96cb-db91-42ec-8963-4b1259c7b65f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.686594 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw6ld\" (UniqueName: \"kubernetes.io/projected/927d96cb-db91-42ec-8963-4b1259c7b65f-kube-api-access-fw6ld\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.688638 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwkth\" (UniqueName: \"kubernetes.io/projected/041081c2-6470-40f7-945d-43ac9a3d716f-kube-api-access-hwkth\") pod \"ceilometer-0\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " pod="openstack/ceilometer-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.740728 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"927d96cb-db91-42ec-8963-4b1259c7b65f\") " pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.776001 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 11:09:52 crc kubenswrapper[5014]: I1205 11:09:52.788401 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.190494 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.356228 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ee1ae65-148c-40e6-afc5-8526bade7971" path="/var/lib/kubelet/pods/0ee1ae65-148c-40e6-afc5-8526bade7971/volumes" Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.357189 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4" path="/var/lib/kubelet/pods/7bbdbdf1-2a4b-43da-bb88-a5e6d3cbd0b4/volumes" Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.357990 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0ab7533-dc12-4a86-9191-fb6e11db2740" path="/var/lib/kubelet/pods/e0ab7533-dc12-4a86-9191-fb6e11db2740/volumes" Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.359424 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.359457 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.359472 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c59756a0-84f2-4678-9294-aaa2475d08ec","Type":"ContainerStarted","Data":"b6ea7ccc5466c79c6dcd08fbd0ac8071ec8db1a5a0c13c8bb65c0d3a0701aaa4"} Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.359490 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c59756a0-84f2-4678-9294-aaa2475d08ec","Type":"ContainerStarted","Data":"a3176b60ef5b52c62d449b84ed227575eb242cdd3711d955c10604de7a6a23f4"} Dec 05 11:09:53 crc kubenswrapper[5014]: W1205 11:09:53.380461 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod927d96cb_db91_42ec_8963_4b1259c7b65f.slice/crio-340f9be44a4da2b8d0f77d53260ffb67ee6d6ecc71194049fbb53fb692d31ce0 WatchSource:0}: Error finding container 340f9be44a4da2b8d0f77d53260ffb67ee6d6ecc71194049fbb53fb692d31ce0: Status 404 returned error can't find the container with id 340f9be44a4da2b8d0f77d53260ffb67ee6d6ecc71194049fbb53fb692d31ce0 Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.962777 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xjq65"] Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.975755 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xjq65"] Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.975861 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.978665 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.979728 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6g4gm" Dec 05 11:09:53 crc kubenswrapper[5014]: I1205 11:09:53.980214 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.099900 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r4j5\" (UniqueName: \"kubernetes.io/projected/d5301290-e01e-40a5-ba62-bec11488a2e6-kube-api-access-4r4j5\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.100280 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-config-data\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.100366 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.100389 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-scripts\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.201948 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r4j5\" (UniqueName: \"kubernetes.io/projected/d5301290-e01e-40a5-ba62-bec11488a2e6-kube-api-access-4r4j5\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.202039 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-config-data\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.202150 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.202176 5014 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-scripts\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.206475 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-scripts\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.206959 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-config-data\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.208823 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.228116 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r4j5\" (UniqueName: \"kubernetes.io/projected/d5301290-e01e-40a5-ba62-bec11488a2e6-kube-api-access-4r4j5\") pod \"nova-cell0-conductor-db-sync-xjq65\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.363523 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c59756a0-84f2-4678-9294-aaa2475d08ec","Type":"ContainerStarted","Data":"5c3a77fa9be75021a4bf3db7073ffab19540a46dc926f5b0122b6034a3cd12c6"} Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.365173 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerStarted","Data":"2b8ee81182b630640bfddb0b8b34db37af4726e6c2d3e14a5bf54367925f1f44"} Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.371331 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"927d96cb-db91-42ec-8963-4b1259c7b65f","Type":"ContainerStarted","Data":"ca826b29181c9835336f6e3028608434de8484dcfcb795ae8f2e4343ff1a3e12"} Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.371382 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"927d96cb-db91-42ec-8963-4b1259c7b65f","Type":"ContainerStarted","Data":"340f9be44a4da2b8d0f77d53260ffb67ee6d6ecc71194049fbb53fb692d31ce0"} Dec 05 11:09:54 crc kubenswrapper[5014]: I1205 11:09:54.442689 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:09:55 crc kubenswrapper[5014]: W1205 11:09:55.019924 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5301290_e01e_40a5_ba62_bec11488a2e6.slice/crio-b0153adc9e9339f2cb09ad30c3f3be5f61c99e4fbb30ca570a8a777e12457667 WatchSource:0}: Error finding container b0153adc9e9339f2cb09ad30c3f3be5f61c99e4fbb30ca570a8a777e12457667: Status 404 returned error can't find the container with id b0153adc9e9339f2cb09ad30c3f3be5f61c99e4fbb30ca570a8a777e12457667 Dec 05 11:09:55 crc kubenswrapper[5014]: I1205 11:09:55.025289 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.025252351 podStartE2EDuration="4.025252351s" podCreationTimestamp="2025-12-05 11:09:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:54.410561657 +0000 UTC m=+1321.358679381" watchObservedRunningTime="2025-12-05 11:09:55.025252351 +0000 UTC m=+1321.973370055" Dec 05 11:09:55 crc kubenswrapper[5014]: I1205 11:09:55.026960 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xjq65"] Dec 05 11:09:55 crc kubenswrapper[5014]: I1205 11:09:55.382775 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"927d96cb-db91-42ec-8963-4b1259c7b65f","Type":"ContainerStarted","Data":"7a948e2f07dc9af3b225e92ea93b0a3573745299bc8af2d58f19de291b55f02a"} Dec 05 11:09:55 crc kubenswrapper[5014]: I1205 11:09:55.384399 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xjq65" event={"ID":"d5301290-e01e-40a5-ba62-bec11488a2e6","Type":"ContainerStarted","Data":"b0153adc9e9339f2cb09ad30c3f3be5f61c99e4fbb30ca570a8a777e12457667"} Dec 05 11:09:55 crc kubenswrapper[5014]: I1205 11:09:55.386397 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerStarted","Data":"28543d682ca7d976e317263f0106ea981855d1a73792061f5ef607b52bf1f64a"} Dec 05 11:09:55 crc kubenswrapper[5014]: I1205 11:09:55.386459 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerStarted","Data":"c5512328fee55de18c9020615a218b5fb6c0e91aa1b2066db9f0d9a5b10a1f54"} Dec 05 11:09:55 crc kubenswrapper[5014]: I1205 11:09:55.413457 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.413437423 podStartE2EDuration="3.413437423s" podCreationTimestamp="2025-12-05 11:09:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:09:55.402486479 +0000 UTC m=+1322.350604203" watchObservedRunningTime="2025-12-05 11:09:55.413437423 +0000 UTC m=+1322.361555137" Dec 05 11:09:56 crc kubenswrapper[5014]: I1205 11:09:56.402889 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerStarted","Data":"0caa6a7b03cb0426236066b9f07c225523f7f5c39facb2776a9662fc96cafbe7"} Dec 05 11:09:57 crc kubenswrapper[5014]: I1205 11:09:57.415483 5014 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerStarted","Data":"4571572a0cfb81ed5844a8a3587eb346db293e0df4b19c022c00f726d74a9ea2"} Dec 05 11:09:57 crc kubenswrapper[5014]: I1205 11:09:57.416400 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:09:57 crc kubenswrapper[5014]: I1205 11:09:57.415760 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="sg-core" containerID="cri-o://0caa6a7b03cb0426236066b9f07c225523f7f5c39facb2776a9662fc96cafbe7" gracePeriod=30 Dec 05 11:09:57 crc kubenswrapper[5014]: I1205 11:09:57.415747 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="proxy-httpd" containerID="cri-o://4571572a0cfb81ed5844a8a3587eb346db293e0df4b19c022c00f726d74a9ea2" gracePeriod=30 Dec 05 11:09:57 crc kubenswrapper[5014]: I1205 11:09:57.415776 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="ceilometer-notification-agent" containerID="cri-o://28543d682ca7d976e317263f0106ea981855d1a73792061f5ef607b52bf1f64a" gracePeriod=30 Dec 05 11:09:57 crc kubenswrapper[5014]: I1205 11:09:57.415697 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="ceilometer-central-agent" containerID="cri-o://c5512328fee55de18c9020615a218b5fb6c0e91aa1b2066db9f0d9a5b10a1f54" gracePeriod=30 Dec 05 11:09:57 crc kubenswrapper[5014]: I1205 11:09:57.461721 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.78164995 podStartE2EDuration="5.461688769s" podCreationTimestamp="2025-12-05 11:09:52 +0000 UTC" firstStartedPulling="2025-12-05 11:09:53.351566359 +0000 UTC m=+1320.299684063" lastFinishedPulling="2025-12-05 11:09:57.031605168 +0000 UTC m=+1323.979722882" observedRunningTime="2025-12-05 11:09:57.453912162 +0000 UTC m=+1324.402029876" watchObservedRunningTime="2025-12-05 11:09:57.461688769 +0000 UTC m=+1324.409806493" Dec 05 11:09:58 crc kubenswrapper[5014]: I1205 11:09:58.428896 5014 generic.go:334] "Generic (PLEG): container finished" podID="041081c2-6470-40f7-945d-43ac9a3d716f" containerID="0caa6a7b03cb0426236066b9f07c225523f7f5c39facb2776a9662fc96cafbe7" exitCode=2 Dec 05 11:09:58 crc kubenswrapper[5014]: I1205 11:09:58.428970 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerDied","Data":"0caa6a7b03cb0426236066b9f07c225523f7f5c39facb2776a9662fc96cafbe7"} Dec 05 11:09:58 crc kubenswrapper[5014]: I1205 11:09:58.430198 5014 generic.go:334] "Generic (PLEG): container finished" podID="041081c2-6470-40f7-945d-43ac9a3d716f" containerID="28543d682ca7d976e317263f0106ea981855d1a73792061f5ef607b52bf1f64a" exitCode=0 Dec 05 11:09:58 crc kubenswrapper[5014]: I1205 11:09:58.430232 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerDied","Data":"28543d682ca7d976e317263f0106ea981855d1a73792061f5ef607b52bf1f64a"} Dec 05 11:10:01 crc kubenswrapper[5014]: I1205 11:10:01.781422 5014 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 11:10:01 crc kubenswrapper[5014]: I1205 11:10:01.781983 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 11:10:01 crc kubenswrapper[5014]: I1205 11:10:01.811784 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 11:10:01 crc kubenswrapper[5014]: I1205 11:10:01.824484 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 11:10:02 crc kubenswrapper[5014]: I1205 11:10:02.477176 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 11:10:02 crc kubenswrapper[5014]: I1205 11:10:02.477228 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 11:10:02 crc kubenswrapper[5014]: I1205 11:10:02.776752 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 11:10:02 crc kubenswrapper[5014]: I1205 11:10:02.777057 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 11:10:02 crc kubenswrapper[5014]: I1205 11:10:02.809170 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 11:10:02 crc kubenswrapper[5014]: I1205 11:10:02.820716 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 11:10:03 crc kubenswrapper[5014]: I1205 11:10:03.488144 5014 generic.go:334] "Generic (PLEG): container finished" podID="041081c2-6470-40f7-945d-43ac9a3d716f" containerID="c5512328fee55de18c9020615a218b5fb6c0e91aa1b2066db9f0d9a5b10a1f54" exitCode=0 Dec 05 11:10:03 crc kubenswrapper[5014]: I1205 11:10:03.488258 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerDied","Data":"c5512328fee55de18c9020615a218b5fb6c0e91aa1b2066db9f0d9a5b10a1f54"} Dec 05 11:10:03 crc kubenswrapper[5014]: I1205 11:10:03.490960 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xjq65" event={"ID":"d5301290-e01e-40a5-ba62-bec11488a2e6","Type":"ContainerStarted","Data":"ef2d66cbce0da6e6eb448a6ee733ee3758c0635fa383059619b9caee5715929c"} Dec 05 11:10:03 crc kubenswrapper[5014]: I1205 11:10:03.491855 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 11:10:03 crc kubenswrapper[5014]: I1205 11:10:03.491908 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 11:10:03 crc kubenswrapper[5014]: I1205 11:10:03.514348 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-xjq65" podStartSLOduration=2.613494798 podStartE2EDuration="10.51432382s" podCreationTimestamp="2025-12-05 11:09:53 +0000 UTC" firstStartedPulling="2025-12-05 11:09:55.035513607 +0000 UTC m=+1321.983631311" lastFinishedPulling="2025-12-05 11:10:02.936342619 +0000 UTC m=+1329.884460333" observedRunningTime="2025-12-05 11:10:03.504176336 +0000 UTC m=+1330.452294050" 
watchObservedRunningTime="2025-12-05 11:10:03.51432382 +0000 UTC m=+1330.462441524" Dec 05 11:10:04 crc kubenswrapper[5014]: I1205 11:10:04.481567 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 11:10:04 crc kubenswrapper[5014]: I1205 11:10:04.482880 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 11:10:05 crc kubenswrapper[5014]: I1205 11:10:05.447811 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 11:10:05 crc kubenswrapper[5014]: I1205 11:10:05.448101 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 11:10:16 crc kubenswrapper[5014]: I1205 11:10:16.606864 5014 generic.go:334] "Generic (PLEG): container finished" podID="d5301290-e01e-40a5-ba62-bec11488a2e6" containerID="ef2d66cbce0da6e6eb448a6ee733ee3758c0635fa383059619b9caee5715929c" exitCode=0 Dec 05 11:10:16 crc kubenswrapper[5014]: I1205 11:10:16.606960 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xjq65" event={"ID":"d5301290-e01e-40a5-ba62-bec11488a2e6","Type":"ContainerDied","Data":"ef2d66cbce0da6e6eb448a6ee733ee3758c0635fa383059619b9caee5715929c"} Dec 05 11:10:17 crc kubenswrapper[5014]: I1205 11:10:17.994700 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.104321 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r4j5\" (UniqueName: \"kubernetes.io/projected/d5301290-e01e-40a5-ba62-bec11488a2e6-kube-api-access-4r4j5\") pod \"d5301290-e01e-40a5-ba62-bec11488a2e6\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.104466 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-combined-ca-bundle\") pod \"d5301290-e01e-40a5-ba62-bec11488a2e6\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.104499 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-config-data\") pod \"d5301290-e01e-40a5-ba62-bec11488a2e6\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.104539 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-scripts\") pod \"d5301290-e01e-40a5-ba62-bec11488a2e6\" (UID: \"d5301290-e01e-40a5-ba62-bec11488a2e6\") " Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.112458 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-scripts" (OuterVolumeSpecName: "scripts") pod "d5301290-e01e-40a5-ba62-bec11488a2e6" (UID: "d5301290-e01e-40a5-ba62-bec11488a2e6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.112495 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5301290-e01e-40a5-ba62-bec11488a2e6-kube-api-access-4r4j5" (OuterVolumeSpecName: "kube-api-access-4r4j5") pod "d5301290-e01e-40a5-ba62-bec11488a2e6" (UID: "d5301290-e01e-40a5-ba62-bec11488a2e6"). InnerVolumeSpecName "kube-api-access-4r4j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.136386 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-config-data" (OuterVolumeSpecName: "config-data") pod "d5301290-e01e-40a5-ba62-bec11488a2e6" (UID: "d5301290-e01e-40a5-ba62-bec11488a2e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.136557 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d5301290-e01e-40a5-ba62-bec11488a2e6" (UID: "d5301290-e01e-40a5-ba62-bec11488a2e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.206361 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.206401 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.206415 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5301290-e01e-40a5-ba62-bec11488a2e6-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.206428 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r4j5\" (UniqueName: \"kubernetes.io/projected/d5301290-e01e-40a5-ba62-bec11488a2e6-kube-api-access-4r4j5\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.628860 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xjq65" event={"ID":"d5301290-e01e-40a5-ba62-bec11488a2e6","Type":"ContainerDied","Data":"b0153adc9e9339f2cb09ad30c3f3be5f61c99e4fbb30ca570a8a777e12457667"} Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.629188 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0153adc9e9339f2cb09ad30c3f3be5f61c99e4fbb30ca570a8a777e12457667" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.628895 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xjq65" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.741121 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 11:10:18 crc kubenswrapper[5014]: E1205 11:10:18.741696 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5301290-e01e-40a5-ba62-bec11488a2e6" containerName="nova-cell0-conductor-db-sync" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.741718 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5301290-e01e-40a5-ba62-bec11488a2e6" containerName="nova-cell0-conductor-db-sync" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.741924 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5301290-e01e-40a5-ba62-bec11488a2e6" containerName="nova-cell0-conductor-db-sync" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.742603 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.745090 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6g4gm" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.747505 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.761145 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.817692 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/533479f0-4f9e-46b8-a2b8-b0eea26ae3bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.817785 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/533479f0-4f9e-46b8-a2b8-b0eea26ae3bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.817872 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq27g\" (UniqueName: \"kubernetes.io/projected/533479f0-4f9e-46b8-a2b8-b0eea26ae3bc-kube-api-access-wq27g\") pod \"nova-cell0-conductor-0\" (UID: \"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.921129 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq27g\" (UniqueName: \"kubernetes.io/projected/533479f0-4f9e-46b8-a2b8-b0eea26ae3bc-kube-api-access-wq27g\") pod \"nova-cell0-conductor-0\" (UID: \"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.921333 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/533479f0-4f9e-46b8-a2b8-b0eea26ae3bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:18 crc kubenswrapper[5014]: 
I1205 11:10:18.921485 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/533479f0-4f9e-46b8-a2b8-b0eea26ae3bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.930176 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/533479f0-4f9e-46b8-a2b8-b0eea26ae3bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.930974 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/533479f0-4f9e-46b8-a2b8-b0eea26ae3bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:18 crc kubenswrapper[5014]: I1205 11:10:18.936917 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq27g\" (UniqueName: \"kubernetes.io/projected/533479f0-4f9e-46b8-a2b8-b0eea26ae3bc-kube-api-access-wq27g\") pod \"nova-cell0-conductor-0\" (UID: \"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc\") " pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:19 crc kubenswrapper[5014]: I1205 11:10:19.108785 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:19 crc kubenswrapper[5014]: I1205 11:10:19.597666 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 11:10:19 crc kubenswrapper[5014]: I1205 11:10:19.638967 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc","Type":"ContainerStarted","Data":"17e46bab0da80cf7b4e914964d806306c302780bcbb9fcee7fa2920fd19bc9a4"} Dec 05 11:10:20 crc kubenswrapper[5014]: I1205 11:10:20.649897 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"533479f0-4f9e-46b8-a2b8-b0eea26ae3bc","Type":"ContainerStarted","Data":"49e814f2d065f060f7f1158fa76a21bcf06dca4d0f35b0de04f2f555c9b5485b"} Dec 05 11:10:20 crc kubenswrapper[5014]: I1205 11:10:20.650494 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:20 crc kubenswrapper[5014]: I1205 11:10:20.667761 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.667739637 podStartE2EDuration="2.667739637s" podCreationTimestamp="2025-12-05 11:10:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:20.664008227 +0000 UTC m=+1347.612125931" watchObservedRunningTime="2025-12-05 11:10:20.667739637 +0000 UTC m=+1347.615857341" Dec 05 11:10:22 crc kubenswrapper[5014]: I1205 11:10:22.793229 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.144258 5014 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.608101 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-xkr6j"] Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.609418 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.612511 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.615200 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.632153 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-xkr6j"] Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.739516 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-config-data\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.739583 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.739644 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-scripts\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.739694 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt9q2\" (UniqueName: \"kubernetes.io/projected/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-kube-api-access-gt9q2\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.841941 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-scripts\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.842040 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt9q2\" (UniqueName: \"kubernetes.io/projected/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-kube-api-access-gt9q2\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.842229 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-config-data\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.842291 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.851053 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-config-data\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.864964 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-scripts\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.865292 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.867193 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.870985 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.872165 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.878375 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.941010 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt9q2\" (UniqueName: \"kubernetes.io/projected/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-kube-api-access-gt9q2\") pod \"nova-cell0-cell-mapping-xkr6j\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.944487 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-config-data\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.944619 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dntfp\" (UniqueName: \"kubernetes.io/projected/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-kube-api-access-dntfp\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 
11:10:24.944718 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-logs\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.944755 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.958572 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.960562 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:24 crc kubenswrapper[5014]: I1205 11:10:24.968765 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.001319 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.056407 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmrnd\" (UniqueName: \"kubernetes.io/projected/d01fa8c4-f7ca-4c62-912d-5388c911193b-kube-api-access-tmrnd\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.056480 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-logs\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.056517 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d01fa8c4-f7ca-4c62-912d-5388c911193b-logs\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.056551 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.056675 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-config-data\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.056796 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-config-data\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.056860 
5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dntfp\" (UniqueName: \"kubernetes.io/projected/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-kube-api-access-dntfp\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.056881 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.062011 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-logs\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.095343 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.107053 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-config-data\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.131443 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.132941 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.148232 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.168290 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.169603 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.169736 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmrnd\" (UniqueName: \"kubernetes.io/projected/d01fa8c4-f7ca-4c62-912d-5388c911193b-kube-api-access-tmrnd\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.169769 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d01fa8c4-f7ca-4c62-912d-5388c911193b-logs\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.169893 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-config-data\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.178843 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dntfp\" (UniqueName: \"kubernetes.io/projected/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-kube-api-access-dntfp\") pod \"nova-api-0\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " pod="openstack/nova-api-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.191742 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-h9t5f"] Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.193961 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.228458 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.238022 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.240208 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d01fa8c4-f7ca-4c62-912d-5388c911193b-logs\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.248046 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-config-data\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.257963 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmrnd\" (UniqueName: \"kubernetes.io/projected/d01fa8c4-f7ca-4c62-912d-5388c911193b-kube-api-access-tmrnd\") pod \"nova-metadata-0\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.300732 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8njlt\" (UniqueName: \"kubernetes.io/projected/3e927441-85c4-4909-8112-66fe509ddb4d-kube-api-access-8njlt\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.301079 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-svc\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.301186 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.301404 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldwf9\" (UniqueName: \"kubernetes.io/projected/c1290dfc-17fd-46a1-8f34-d1e338523945-kube-api-access-ldwf9\") pod \"nova-cell1-novncproxy-0\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.301530 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-config\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc 
kubenswrapper[5014]: I1205 11:10:25.301650 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.301759 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.301895 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.301994 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.316941 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-h9t5f"] Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.332662 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.362455 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.405962 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.406298 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldwf9\" (UniqueName: \"kubernetes.io/projected/c1290dfc-17fd-46a1-8f34-d1e338523945-kube-api-access-ldwf9\") pod \"nova-cell1-novncproxy-0\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.406387 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-config\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.406461 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.406519 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.406573 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.406605 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.406653 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8njlt\" (UniqueName: \"kubernetes.io/projected/3e927441-85c4-4909-8112-66fe509ddb4d-kube-api-access-8njlt\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.406691 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-svc\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.406715 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-swift-storage-0\") pod 
\"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.407290 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.407373 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.407955 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.408779 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.409576 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-svc\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.410723 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-config\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.411344 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.416062 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.416223 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.422852 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.455098 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8njlt\" (UniqueName: \"kubernetes.io/projected/3e927441-85c4-4909-8112-66fe509ddb4d-kube-api-access-8njlt\") pod \"dnsmasq-dns-865f5d856f-h9t5f\" (UID: 
\"3e927441-85c4-4909-8112-66fe509ddb4d\") " pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.471994 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldwf9\" (UniqueName: \"kubernetes.io/projected/c1290dfc-17fd-46a1-8f34-d1e338523945-kube-api-access-ldwf9\") pod \"nova-cell1-novncproxy-0\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.511072 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmw4v\" (UniqueName: \"kubernetes.io/projected/c2195caf-693e-4f84-bd70-106a0c927b27-kube-api-access-fmw4v\") pod \"nova-scheduler-0\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") " pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.511388 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") " pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.511818 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-config-data\") pod \"nova-scheduler-0\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") " pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.552001 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.563922 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.624434 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmw4v\" (UniqueName: \"kubernetes.io/projected/c2195caf-693e-4f84-bd70-106a0c927b27-kube-api-access-fmw4v\") pod \"nova-scheduler-0\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") " pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.624492 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") " pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.624609 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-config-data\") pod \"nova-scheduler-0\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") " pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.653241 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-config-data\") pod \"nova-scheduler-0\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") " pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.661836 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmw4v\" (UniqueName: \"kubernetes.io/projected/c2195caf-693e-4f84-bd70-106a0c927b27-kube-api-access-fmw4v\") pod \"nova-scheduler-0\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") " pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.674513 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") " pod="openstack/nova-scheduler-0" Dec 05 11:10:25 crc kubenswrapper[5014]: I1205 11:10:25.741479 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.061158 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-xkr6j"] Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.258833 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bb4zk"] Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.260702 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.263254 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.263560 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.285834 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pftk\" (UniqueName: \"kubernetes.io/projected/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-kube-api-access-4pftk\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.285921 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.285987 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-config-data\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.286013 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-scripts\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.305431 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bb4zk"] Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.345406 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.389180 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-config-data\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.389246 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-scripts\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.389378 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pftk\" (UniqueName: \"kubernetes.io/projected/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-kube-api-access-4pftk\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " 
pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.389463 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.397942 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-config-data\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.398191 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.398903 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-scripts\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.419546 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pftk\" (UniqueName: \"kubernetes.io/projected/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-kube-api-access-4pftk\") pod \"nova-cell1-conductor-db-sync-bb4zk\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.436289 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-h9t5f"] Dec 05 11:10:26 crc kubenswrapper[5014]: W1205 11:10:26.436951 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e927441_85c4_4909_8112_66fe509ddb4d.slice/crio-060334d6a96fb8cb73f2ed6408dbdc1e52b96f0b1ef902bf582f8a32954d6ba5 WatchSource:0}: Error finding container 060334d6a96fb8cb73f2ed6408dbdc1e52b96f0b1ef902bf582f8a32954d6ba5: Status 404 returned error can't find the container with id 060334d6a96fb8cb73f2ed6408dbdc1e52b96f0b1ef902bf582f8a32954d6ba5 Dec 05 11:10:26 crc kubenswrapper[5014]: W1205 11:10:26.438186 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd01fa8c4_f7ca_4c62_912d_5388c911193b.slice/crio-111814d6ddbd276f225ca2be298681714554dbe5bba88bfafb747e4e06f045f9 WatchSource:0}: Error finding container 111814d6ddbd276f225ca2be298681714554dbe5bba88bfafb747e4e06f045f9: Status 404 returned error can't find the container with id 111814d6ddbd276f225ca2be298681714554dbe5bba88bfafb747e4e06f045f9 Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.444838 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:10:26 crc kubenswrapper[5014]: W1205 11:10:26.446595 5014 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1290dfc_17fd_46a1_8f34_d1e338523945.slice/crio-f7afcb1a51def3e8c451160769f6f47017890a0c6b3f58336ab20be5e78ebec3 WatchSource:0}: Error finding container f7afcb1a51def3e8c451160769f6f47017890a0c6b3f58336ab20be5e78ebec3: Status 404 returned error can't find the container with id f7afcb1a51def3e8c451160769f6f47017890a0c6b3f58336ab20be5e78ebec3 Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.453036 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.605068 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:10:26 crc kubenswrapper[5014]: W1205 11:10:26.612410 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2195caf_693e_4f84_bd70_106a0c927b27.slice/crio-82aebf67f88aadaa7d176d2f6eb34cb584a515806630b73bfefec9745e820166 WatchSource:0}: Error finding container 82aebf67f88aadaa7d176d2f6eb34cb584a515806630b73bfefec9745e820166: Status 404 returned error can't find the container with id 82aebf67f88aadaa7d176d2f6eb34cb584a515806630b73bfefec9745e820166 Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.651051 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.765036 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xkr6j" event={"ID":"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf","Type":"ContainerStarted","Data":"47ef7cc458f3ef485675039a98477765e3ae2d9525caa0186c26df7be102e80b"} Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.765595 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xkr6j" event={"ID":"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf","Type":"ContainerStarted","Data":"b918b60fda99b70ad2a501847d3a35cb8fc35598393716780d2132a826e89ad3"} Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.777126 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c1290dfc-17fd-46a1-8f34-d1e338523945","Type":"ContainerStarted","Data":"f7afcb1a51def3e8c451160769f6f47017890a0c6b3f58336ab20be5e78ebec3"} Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.780660 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d796a63b-7c8c-450e-8b6f-f2fd239e13c9","Type":"ContainerStarted","Data":"171e420b1aad59286851b6d34b6e8ba56be471c196c26527266b23dbcdbd988c"} Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.785030 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2195caf-693e-4f84-bd70-106a0c927b27","Type":"ContainerStarted","Data":"82aebf67f88aadaa7d176d2f6eb34cb584a515806630b73bfefec9745e820166"} Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.786862 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-xkr6j" podStartSLOduration=2.786850387 podStartE2EDuration="2.786850387s" podCreationTimestamp="2025-12-05 11:10:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:26.784907251 +0000 UTC m=+1353.733024955" watchObservedRunningTime="2025-12-05 11:10:26.786850387 +0000 UTC 
m=+1353.734968091" Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.792184 5014 generic.go:334] "Generic (PLEG): container finished" podID="3e927441-85c4-4909-8112-66fe509ddb4d" containerID="42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44" exitCode=0 Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.792244 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" event={"ID":"3e927441-85c4-4909-8112-66fe509ddb4d","Type":"ContainerDied","Data":"42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44"} Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.792286 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" event={"ID":"3e927441-85c4-4909-8112-66fe509ddb4d","Type":"ContainerStarted","Data":"060334d6a96fb8cb73f2ed6408dbdc1e52b96f0b1ef902bf582f8a32954d6ba5"} Dec 05 11:10:26 crc kubenswrapper[5014]: I1205 11:10:26.800513 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d01fa8c4-f7ca-4c62-912d-5388c911193b","Type":"ContainerStarted","Data":"111814d6ddbd276f225ca2be298681714554dbe5bba88bfafb747e4e06f045f9"} Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.172001 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bb4zk"] Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.862463 5014 generic.go:334] "Generic (PLEG): container finished" podID="041081c2-6470-40f7-945d-43ac9a3d716f" containerID="4571572a0cfb81ed5844a8a3587eb346db293e0df4b19c022c00f726d74a9ea2" exitCode=137 Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.862574 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerDied","Data":"4571572a0cfb81ed5844a8a3587eb346db293e0df4b19c022c00f726d74a9ea2"} Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.865578 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bb4zk" event={"ID":"167e48bc-c3dd-464f-b4ed-9109ffc6de7d","Type":"ContainerStarted","Data":"993c4f1fcff568c62368371c02074c7b709267753f182b64ea886085a3c51790"} Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.865621 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bb4zk" event={"ID":"167e48bc-c3dd-464f-b4ed-9109ffc6de7d","Type":"ContainerStarted","Data":"3d3de94d66c6129575285f7d32cb81df815feceba21df1b6ddce4a60292ae422"} Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.891367 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" event={"ID":"3e927441-85c4-4909-8112-66fe509ddb4d","Type":"ContainerStarted","Data":"5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed"} Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.891542 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.904253 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-bb4zk" podStartSLOduration=1.904235289 podStartE2EDuration="1.904235289s" podCreationTimestamp="2025-12-05 11:10:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:27.890034888 +0000 UTC m=+1354.838152602" 
watchObservedRunningTime="2025-12-05 11:10:27.904235289 +0000 UTC m=+1354.852352993" Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.935486 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" podStartSLOduration=2.935465132 podStartE2EDuration="2.935465132s" podCreationTimestamp="2025-12-05 11:10:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:27.911940265 +0000 UTC m=+1354.860057979" watchObservedRunningTime="2025-12-05 11:10:27.935465132 +0000 UTC m=+1354.883582836" Dec 05 11:10:27 crc kubenswrapper[5014]: I1205 11:10:27.967819 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.047223 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-run-httpd\") pod \"041081c2-6470-40f7-945d-43ac9a3d716f\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.047320 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-scripts\") pod \"041081c2-6470-40f7-945d-43ac9a3d716f\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.047377 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwkth\" (UniqueName: \"kubernetes.io/projected/041081c2-6470-40f7-945d-43ac9a3d716f-kube-api-access-hwkth\") pod \"041081c2-6470-40f7-945d-43ac9a3d716f\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.047402 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-config-data\") pod \"041081c2-6470-40f7-945d-43ac9a3d716f\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.047427 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-combined-ca-bundle\") pod \"041081c2-6470-40f7-945d-43ac9a3d716f\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.047478 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-sg-core-conf-yaml\") pod \"041081c2-6470-40f7-945d-43ac9a3d716f\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.047565 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-log-httpd\") pod \"041081c2-6470-40f7-945d-43ac9a3d716f\" (UID: \"041081c2-6470-40f7-945d-43ac9a3d716f\") " Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.048401 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod 
"041081c2-6470-40f7-945d-43ac9a3d716f" (UID: "041081c2-6470-40f7-945d-43ac9a3d716f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.048543 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "041081c2-6470-40f7-945d-43ac9a3d716f" (UID: "041081c2-6470-40f7-945d-43ac9a3d716f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.052675 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/041081c2-6470-40f7-945d-43ac9a3d716f-kube-api-access-hwkth" (OuterVolumeSpecName: "kube-api-access-hwkth") pod "041081c2-6470-40f7-945d-43ac9a3d716f" (UID: "041081c2-6470-40f7-945d-43ac9a3d716f"). InnerVolumeSpecName "kube-api-access-hwkth". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.054744 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-scripts" (OuterVolumeSpecName: "scripts") pod "041081c2-6470-40f7-945d-43ac9a3d716f" (UID: "041081c2-6470-40f7-945d-43ac9a3d716f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.083097 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "041081c2-6470-40f7-945d-43ac9a3d716f" (UID: "041081c2-6470-40f7-945d-43ac9a3d716f"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.150676 5014 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.150715 5014 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/041081c2-6470-40f7-945d-43ac9a3d716f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.150725 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.150740 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwkth\" (UniqueName: \"kubernetes.io/projected/041081c2-6470-40f7-945d-43ac9a3d716f-kube-api-access-hwkth\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.150750 5014 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.188569 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "041081c2-6470-40f7-945d-43ac9a3d716f" (UID: "041081c2-6470-40f7-945d-43ac9a3d716f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.252457 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-config-data" (OuterVolumeSpecName: "config-data") pod "041081c2-6470-40f7-945d-43ac9a3d716f" (UID: "041081c2-6470-40f7-945d-43ac9a3d716f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.254011 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.254043 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/041081c2-6470-40f7-945d-43ac9a3d716f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.611700 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.621484 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.918442 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"041081c2-6470-40f7-945d-43ac9a3d716f","Type":"ContainerDied","Data":"2b8ee81182b630640bfddb0b8b34db37af4726e6c2d3e14a5bf54367925f1f44"} Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.918777 5014 scope.go:117] "RemoveContainer" containerID="4571572a0cfb81ed5844a8a3587eb346db293e0df4b19c022c00f726d74a9ea2" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.918803 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:10:28 crc kubenswrapper[5014]: I1205 11:10:28.990156 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.003323 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.021312 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:10:29 crc kubenswrapper[5014]: E1205 11:10:29.021822 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="ceilometer-central-agent" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.021849 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="ceilometer-central-agent" Dec 05 11:10:29 crc kubenswrapper[5014]: E1205 11:10:29.021876 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="ceilometer-notification-agent" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.021883 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="ceilometer-notification-agent" Dec 05 11:10:29 crc kubenswrapper[5014]: E1205 11:10:29.021907 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="sg-core" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.021913 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="sg-core" Dec 05 11:10:29 crc kubenswrapper[5014]: E1205 11:10:29.021937 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="proxy-httpd" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.021943 5014 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="proxy-httpd" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.022127 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="ceilometer-notification-agent" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.022155 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="proxy-httpd" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.022171 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="ceilometer-central-agent" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.022182 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" containerName="sg-core" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.023986 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.025874 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.026510 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.060727 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.069592 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-run-httpd\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.069643 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.069731 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-scripts\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.069752 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-config-data\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.069958 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddb4h\" (UniqueName: \"kubernetes.io/projected/743f670f-108f-446c-bee9-fbd36f5cf074-kube-api-access-ddb4h\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.069997 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.070031 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-log-httpd\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.171678 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-scripts\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.171718 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-config-data\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.171805 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddb4h\" (UniqueName: \"kubernetes.io/projected/743f670f-108f-446c-bee9-fbd36f5cf074-kube-api-access-ddb4h\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.171830 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.171901 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-log-httpd\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.171936 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-run-httpd\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.171961 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.172583 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-run-httpd\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.172909 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-log-httpd\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.178470 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.178564 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-config-data\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.180525 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.187112 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-scripts\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.192327 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddb4h\" (UniqueName: \"kubernetes.io/projected/743f670f-108f-446c-bee9-fbd36f5cf074-kube-api-access-ddb4h\") pod \"ceilometer-0\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") " pod="openstack/ceilometer-0" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.328981 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="041081c2-6470-40f7-945d-43ac9a3d716f" path="/var/lib/kubelet/pods/041081c2-6470-40f7-945d-43ac9a3d716f/volumes" Dec 05 11:10:29 crc kubenswrapper[5014]: I1205 11:10:29.359361 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:10:30 crc kubenswrapper[5014]: I1205 11:10:30.867615 5014 scope.go:117] "RemoveContainer" containerID="0caa6a7b03cb0426236066b9f07c225523f7f5c39facb2776a9662fc96cafbe7" Dec 05 11:10:30 crc kubenswrapper[5014]: I1205 11:10:30.927989 5014 scope.go:117] "RemoveContainer" containerID="28543d682ca7d976e317263f0106ea981855d1a73792061f5ef607b52bf1f64a" Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.197058 5014 scope.go:117] "RemoveContainer" containerID="c5512328fee55de18c9020615a218b5fb6c0e91aa1b2066db9f0d9a5b10a1f54" Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.448365 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.948187 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerStarted","Data":"1efc1f451480891b1e00fbe806358a8913381fcc88ff603ae9a874939f686647"} Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.952023 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d01fa8c4-f7ca-4c62-912d-5388c911193b","Type":"ContainerStarted","Data":"45d7801c4bfdbc1a5a87bfaad03c7bb750e14bf1b9a0ba0915881452ba5f6760"} Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.952079 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d01fa8c4-f7ca-4c62-912d-5388c911193b","Type":"ContainerStarted","Data":"85d83dcbe1bc7061f710ec3f1ae0c72de01a155468630073f3a29e24c8640214"} Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.952226 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerName="nova-metadata-log" containerID="cri-o://85d83dcbe1bc7061f710ec3f1ae0c72de01a155468630073f3a29e24c8640214" gracePeriod=30 Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.952960 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerName="nova-metadata-metadata" containerID="cri-o://45d7801c4bfdbc1a5a87bfaad03c7bb750e14bf1b9a0ba0915881452ba5f6760" gracePeriod=30 Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.958847 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c1290dfc-17fd-46a1-8f34-d1e338523945","Type":"ContainerStarted","Data":"a270aa9e58e6d1e81130adf28324cb68cdf67724cf4377ac0f05b5314e1185c3"} Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.959380 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="c1290dfc-17fd-46a1-8f34-d1e338523945" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://a270aa9e58e6d1e81130adf28324cb68cdf67724cf4377ac0f05b5314e1185c3" gracePeriod=30 Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.972105 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d796a63b-7c8c-450e-8b6f-f2fd239e13c9","Type":"ContainerStarted","Data":"0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67"} Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.972386 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"d796a63b-7c8c-450e-8b6f-f2fd239e13c9","Type":"ContainerStarted","Data":"ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363"} Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.974690 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.517675236 podStartE2EDuration="7.974667444s" podCreationTimestamp="2025-12-05 11:10:24 +0000 UTC" firstStartedPulling="2025-12-05 11:10:26.450965063 +0000 UTC m=+1353.399082767" lastFinishedPulling="2025-12-05 11:10:30.907957271 +0000 UTC m=+1357.856074975" observedRunningTime="2025-12-05 11:10:31.971542948 +0000 UTC m=+1358.919660672" watchObservedRunningTime="2025-12-05 11:10:31.974667444 +0000 UTC m=+1358.922785148" Dec 05 11:10:31 crc kubenswrapper[5014]: I1205 11:10:31.989849 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2195caf-693e-4f84-bd70-106a0c927b27","Type":"ContainerStarted","Data":"cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56"} Dec 05 11:10:32 crc kubenswrapper[5014]: I1205 11:10:32.013217 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.498634349 podStartE2EDuration="7.013170241s" podCreationTimestamp="2025-12-05 11:10:25 +0000 UTC" firstStartedPulling="2025-12-05 11:10:26.448392652 +0000 UTC m=+1353.396510346" lastFinishedPulling="2025-12-05 11:10:30.962928534 +0000 UTC m=+1357.911046238" observedRunningTime="2025-12-05 11:10:32.002768221 +0000 UTC m=+1358.950885935" watchObservedRunningTime="2025-12-05 11:10:32.013170241 +0000 UTC m=+1358.961287945" Dec 05 11:10:32 crc kubenswrapper[5014]: I1205 11:10:32.030550 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.339050097 podStartE2EDuration="8.030527108s" podCreationTimestamp="2025-12-05 11:10:24 +0000 UTC" firstStartedPulling="2025-12-05 11:10:26.273319538 +0000 UTC m=+1353.221437242" lastFinishedPulling="2025-12-05 11:10:30.964796549 +0000 UTC m=+1357.912914253" observedRunningTime="2025-12-05 11:10:32.018215582 +0000 UTC m=+1358.966333296" watchObservedRunningTime="2025-12-05 11:10:32.030527108 +0000 UTC m=+1358.978644812" Dec 05 11:10:32 crc kubenswrapper[5014]: I1205 11:10:32.041535 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.703900629 podStartE2EDuration="7.041517423s" podCreationTimestamp="2025-12-05 11:10:25 +0000 UTC" firstStartedPulling="2025-12-05 11:10:26.615089404 +0000 UTC m=+1353.563207108" lastFinishedPulling="2025-12-05 11:10:30.952706188 +0000 UTC m=+1357.900823902" observedRunningTime="2025-12-05 11:10:32.035813955 +0000 UTC m=+1358.983931679" watchObservedRunningTime="2025-12-05 11:10:32.041517423 +0000 UTC m=+1358.989635127" Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.009467 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerStarted","Data":"dca2fc7121ab0e756b20425edd88c2e47744b91c73aadc481d3f56f8fa58f61b"} Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.010085 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerStarted","Data":"f43a46717ad64328fd39abfe0a782181090756536ec76cf8f401f628a6b5c485"} Dec 05 11:10:33 crc kubenswrapper[5014]: 
I1205 11:10:33.012010 5014 generic.go:334] "Generic (PLEG): container finished" podID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerID="45d7801c4bfdbc1a5a87bfaad03c7bb750e14bf1b9a0ba0915881452ba5f6760" exitCode=0 Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.012058 5014 generic.go:334] "Generic (PLEG): container finished" podID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerID="85d83dcbe1bc7061f710ec3f1ae0c72de01a155468630073f3a29e24c8640214" exitCode=143 Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.012592 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d01fa8c4-f7ca-4c62-912d-5388c911193b","Type":"ContainerDied","Data":"45d7801c4bfdbc1a5a87bfaad03c7bb750e14bf1b9a0ba0915881452ba5f6760"} Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.012648 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d01fa8c4-f7ca-4c62-912d-5388c911193b","Type":"ContainerDied","Data":"85d83dcbe1bc7061f710ec3f1ae0c72de01a155468630073f3a29e24c8640214"} Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.109371 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.157762 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmrnd\" (UniqueName: \"kubernetes.io/projected/d01fa8c4-f7ca-4c62-912d-5388c911193b-kube-api-access-tmrnd\") pod \"d01fa8c4-f7ca-4c62-912d-5388c911193b\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.157826 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-combined-ca-bundle\") pod \"d01fa8c4-f7ca-4c62-912d-5388c911193b\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.157947 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-config-data\") pod \"d01fa8c4-f7ca-4c62-912d-5388c911193b\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.158004 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d01fa8c4-f7ca-4c62-912d-5388c911193b-logs\") pod \"d01fa8c4-f7ca-4c62-912d-5388c911193b\" (UID: \"d01fa8c4-f7ca-4c62-912d-5388c911193b\") " Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.158367 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d01fa8c4-f7ca-4c62-912d-5388c911193b-logs" (OuterVolumeSpecName: "logs") pod "d01fa8c4-f7ca-4c62-912d-5388c911193b" (UID: "d01fa8c4-f7ca-4c62-912d-5388c911193b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.158434 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d01fa8c4-f7ca-4c62-912d-5388c911193b-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.185509 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d01fa8c4-f7ca-4c62-912d-5388c911193b-kube-api-access-tmrnd" (OuterVolumeSpecName: "kube-api-access-tmrnd") pod "d01fa8c4-f7ca-4c62-912d-5388c911193b" (UID: "d01fa8c4-f7ca-4c62-912d-5388c911193b"). InnerVolumeSpecName "kube-api-access-tmrnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.195411 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d01fa8c4-f7ca-4c62-912d-5388c911193b" (UID: "d01fa8c4-f7ca-4c62-912d-5388c911193b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.217399 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-config-data" (OuterVolumeSpecName: "config-data") pod "d01fa8c4-f7ca-4c62-912d-5388c911193b" (UID: "d01fa8c4-f7ca-4c62-912d-5388c911193b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.260765 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmrnd\" (UniqueName: \"kubernetes.io/projected/d01fa8c4-f7ca-4c62-912d-5388c911193b-kube-api-access-tmrnd\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.260798 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:33 crc kubenswrapper[5014]: I1205 11:10:33.260809 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d01fa8c4-f7ca-4c62-912d-5388c911193b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.040761 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d01fa8c4-f7ca-4c62-912d-5388c911193b","Type":"ContainerDied","Data":"111814d6ddbd276f225ca2be298681714554dbe5bba88bfafb747e4e06f045f9"} Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.040782 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.041092 5014 scope.go:117] "RemoveContainer" containerID="45d7801c4bfdbc1a5a87bfaad03c7bb750e14bf1b9a0ba0915881452ba5f6760" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.049817 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerStarted","Data":"5880170552fc383dd4b065a8d7328729e9a70eee657a91e8f42d0ac20d78abd9"} Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.065706 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.076467 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.080456 5014 scope.go:117] "RemoveContainer" containerID="85d83dcbe1bc7061f710ec3f1ae0c72de01a155468630073f3a29e24c8640214" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.099951 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:34 crc kubenswrapper[5014]: E1205 11:10:34.100496 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerName="nova-metadata-metadata" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.100524 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerName="nova-metadata-metadata" Dec 05 11:10:34 crc kubenswrapper[5014]: E1205 11:10:34.100548 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerName="nova-metadata-log" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.100561 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerName="nova-metadata-log" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.100842 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerName="nova-metadata-metadata" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.100882 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d01fa8c4-f7ca-4c62-912d-5388c911193b" containerName="nova-metadata-log" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.102309 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.106493 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.107791 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.120522 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.176239 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.176305 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec59142e-26a3-473e-9172-4ad744fc8515-logs\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.176334 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwxwl\" (UniqueName: \"kubernetes.io/projected/ec59142e-26a3-473e-9172-4ad744fc8515-kube-api-access-rwxwl\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.176360 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-config-data\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.176390 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.278677 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.278745 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec59142e-26a3-473e-9172-4ad744fc8515-logs\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.278793 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwxwl\" (UniqueName: \"kubernetes.io/projected/ec59142e-26a3-473e-9172-4ad744fc8515-kube-api-access-rwxwl\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " 
pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.278841 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-config-data\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.278874 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.280187 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec59142e-26a3-473e-9172-4ad744fc8515-logs\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.306461 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-config-data\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.320374 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.321308 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwxwl\" (UniqueName: \"kubernetes.io/projected/ec59142e-26a3-473e-9172-4ad744fc8515-kube-api-access-rwxwl\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.329561 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " pod="openstack/nova-metadata-0" Dec 05 11:10:34 crc kubenswrapper[5014]: I1205 11:10:34.425428 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.040996 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.063048 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec59142e-26a3-473e-9172-4ad744fc8515","Type":"ContainerStarted","Data":"52541a152e0b47067a92e6d48b5fc43e14b3e3c00488a587e0a54b87dfd326ea"} Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.335858 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d01fa8c4-f7ca-4c62-912d-5388c911193b" path="/var/lib/kubelet/pods/d01fa8c4-f7ca-4c62-912d-5388c911193b/volumes" Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.336921 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.336961 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.552930 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.575493 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.671664 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-pp7kf"] Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.671963 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" podUID="cd11abe8-3b00-430e-bade-62fd4e9047b6" containerName="dnsmasq-dns" containerID="cri-o://04b05ece9dec83e03596ae482c704b765ff2110ee78173c090cf624823fd8438" gracePeriod=10 Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.744624 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.744661 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 11:10:35 crc kubenswrapper[5014]: I1205 11:10:35.782048 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.075811 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerStarted","Data":"1f4d15bec7aa3e7349d8ae6ce62b966491b4a49d2794d7bf43d0022ab23cb6a2"} Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.076428 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.079362 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec59142e-26a3-473e-9172-4ad744fc8515","Type":"ContainerStarted","Data":"e065f2ee0bab982045a51ed31e48bc0b1b0e0f428ca9a840a8ee4698a9616c5f"} Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.079400 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec59142e-26a3-473e-9172-4ad744fc8515","Type":"ContainerStarted","Data":"75d3ad02d7cb1baee600cf63fd72bb6923d687fd4cd8241e6e41d0835077a7ef"} Dec 05 
11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.082172 5014 generic.go:334] "Generic (PLEG): container finished" podID="cd11abe8-3b00-430e-bade-62fd4e9047b6" containerID="04b05ece9dec83e03596ae482c704b765ff2110ee78173c090cf624823fd8438" exitCode=0 Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.082438 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" event={"ID":"cd11abe8-3b00-430e-bade-62fd4e9047b6","Type":"ContainerDied","Data":"04b05ece9dec83e03596ae482c704b765ff2110ee78173c090cf624823fd8438"} Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.110073 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.622406092 podStartE2EDuration="8.11004932s" podCreationTimestamp="2025-12-05 11:10:28 +0000 UTC" firstStartedPulling="2025-12-05 11:10:31.47091618 +0000 UTC m=+1358.419033884" lastFinishedPulling="2025-12-05 11:10:34.958559388 +0000 UTC m=+1361.906677112" observedRunningTime="2025-12-05 11:10:36.099498487 +0000 UTC m=+1363.047616211" watchObservedRunningTime="2025-12-05 11:10:36.11004932 +0000 UTC m=+1363.058167024" Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.125153 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.140121 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.140097644 podStartE2EDuration="2.140097644s" podCreationTimestamp="2025-12-05 11:10:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:36.136304933 +0000 UTC m=+1363.084422647" watchObservedRunningTime="2025-12-05 11:10:36.140097644 +0000 UTC m=+1363.088215348" Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.374586 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.182:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.416523 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.182:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.812928 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.935615 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-nb\") pod \"cd11abe8-3b00-430e-bade-62fd4e9047b6\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.935674 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-sb\") pod \"cd11abe8-3b00-430e-bade-62fd4e9047b6\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.935817 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-svc\") pod \"cd11abe8-3b00-430e-bade-62fd4e9047b6\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.935949 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-swift-storage-0\") pod \"cd11abe8-3b00-430e-bade-62fd4e9047b6\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.936039 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-config\") pod \"cd11abe8-3b00-430e-bade-62fd4e9047b6\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.936085 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwzbg\" (UniqueName: \"kubernetes.io/projected/cd11abe8-3b00-430e-bade-62fd4e9047b6-kube-api-access-xwzbg\") pod \"cd11abe8-3b00-430e-bade-62fd4e9047b6\" (UID: \"cd11abe8-3b00-430e-bade-62fd4e9047b6\") " Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.943816 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd11abe8-3b00-430e-bade-62fd4e9047b6-kube-api-access-xwzbg" (OuterVolumeSpecName: "kube-api-access-xwzbg") pod "cd11abe8-3b00-430e-bade-62fd4e9047b6" (UID: "cd11abe8-3b00-430e-bade-62fd4e9047b6"). InnerVolumeSpecName "kube-api-access-xwzbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:10:36 crc kubenswrapper[5014]: I1205 11:10:36.991973 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cd11abe8-3b00-430e-bade-62fd4e9047b6" (UID: "cd11abe8-3b00-430e-bade-62fd4e9047b6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.007701 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cd11abe8-3b00-430e-bade-62fd4e9047b6" (UID: "cd11abe8-3b00-430e-bade-62fd4e9047b6"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.033898 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cd11abe8-3b00-430e-bade-62fd4e9047b6" (UID: "cd11abe8-3b00-430e-bade-62fd4e9047b6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.040166 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.040203 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.040215 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.040235 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwzbg\" (UniqueName: \"kubernetes.io/projected/cd11abe8-3b00-430e-bade-62fd4e9047b6-kube-api-access-xwzbg\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.052884 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cd11abe8-3b00-430e-bade-62fd4e9047b6" (UID: "cd11abe8-3b00-430e-bade-62fd4e9047b6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.066732 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-config" (OuterVolumeSpecName: "config") pod "cd11abe8-3b00-430e-bade-62fd4e9047b6" (UID: "cd11abe8-3b00-430e-bade-62fd4e9047b6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.092569 5014 generic.go:334] "Generic (PLEG): container finished" podID="cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf" containerID="47ef7cc458f3ef485675039a98477765e3ae2d9525caa0186c26df7be102e80b" exitCode=0 Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.092650 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xkr6j" event={"ID":"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf","Type":"ContainerDied","Data":"47ef7cc458f3ef485675039a98477765e3ae2d9525caa0186c26df7be102e80b"} Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.097900 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.097251 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-pp7kf" event={"ID":"cd11abe8-3b00-430e-bade-62fd4e9047b6","Type":"ContainerDied","Data":"343eca2baa62f924d83266d2b8f49eacefb8cb46bb779a7db3869b2274a70dce"} Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.098778 5014 scope.go:117] "RemoveContainer" containerID="04b05ece9dec83e03596ae482c704b765ff2110ee78173c090cf624823fd8438" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.144563 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.144618 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cd11abe8-3b00-430e-bade-62fd4e9047b6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.161022 5014 scope.go:117] "RemoveContainer" containerID="409fbb13373ac8520932f8e4dae0106a4fc39d6323bbf2bf7df692b326b1c5e5" Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.202531 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-pp7kf"] Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.213010 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-pp7kf"] Dec 05 11:10:37 crc kubenswrapper[5014]: I1205 11:10:37.328215 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd11abe8-3b00-430e-bade-62fd4e9047b6" path="/var/lib/kubelet/pods/cd11abe8-3b00-430e-bade-62fd4e9047b6/volumes" Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.108426 5014 generic.go:334] "Generic (PLEG): container finished" podID="167e48bc-c3dd-464f-b4ed-9109ffc6de7d" containerID="993c4f1fcff568c62368371c02074c7b709267753f182b64ea886085a3c51790" exitCode=0 Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.108512 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bb4zk" event={"ID":"167e48bc-c3dd-464f-b4ed-9109ffc6de7d","Type":"ContainerDied","Data":"993c4f1fcff568c62368371c02074c7b709267753f182b64ea886085a3c51790"} Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.582674 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.677583 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-scripts\") pod \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.677758 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-combined-ca-bundle\") pod \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.677952 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt9q2\" (UniqueName: \"kubernetes.io/projected/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-kube-api-access-gt9q2\") pod \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.678127 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-config-data\") pod \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\" (UID: \"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf\") " Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.682514 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-kube-api-access-gt9q2" (OuterVolumeSpecName: "kube-api-access-gt9q2") pod "cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf" (UID: "cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf"). InnerVolumeSpecName "kube-api-access-gt9q2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.682717 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-scripts" (OuterVolumeSpecName: "scripts") pod "cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf" (UID: "cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.704434 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf" (UID: "cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.731945 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-config-data" (OuterVolumeSpecName: "config-data") pod "cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf" (UID: "cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.781041 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.781452 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.781467 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:38 crc kubenswrapper[5014]: I1205 11:10:38.781509 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt9q2\" (UniqueName: \"kubernetes.io/projected/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf-kube-api-access-gt9q2\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.123009 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xkr6j" event={"ID":"cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf","Type":"ContainerDied","Data":"b918b60fda99b70ad2a501847d3a35cb8fc35598393716780d2132a826e89ad3"} Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.123049 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b918b60fda99b70ad2a501847d3a35cb8fc35598393716780d2132a826e89ad3" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.123138 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xkr6j" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.330564 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.336862 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-log" containerID="cri-o://ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363" gracePeriod=30 Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.337236 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-api" containerID="cri-o://0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67" gracePeriod=30 Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.345457 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.345665 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c2195caf-693e-4f84-bd70-106a0c927b27" containerName="nova-scheduler-scheduler" containerID="cri-o://cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56" gracePeriod=30 Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.360819 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.361018 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ec59142e-26a3-473e-9172-4ad744fc8515" 
containerName="nova-metadata-log" containerID="cri-o://75d3ad02d7cb1baee600cf63fd72bb6923d687fd4cd8241e6e41d0835077a7ef" gracePeriod=30 Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.361192 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ec59142e-26a3-473e-9172-4ad744fc8515" containerName="nova-metadata-metadata" containerID="cri-o://e065f2ee0bab982045a51ed31e48bc0b1b0e0f428ca9a840a8ee4698a9616c5f" gracePeriod=30 Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.426183 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.426238 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.594294 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.700834 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-scripts\") pod \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.700958 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-config-data\") pod \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.701013 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-combined-ca-bundle\") pod \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.701124 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pftk\" (UniqueName: \"kubernetes.io/projected/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-kube-api-access-4pftk\") pod \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\" (UID: \"167e48bc-c3dd-464f-b4ed-9109ffc6de7d\") " Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.706386 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-kube-api-access-4pftk" (OuterVolumeSpecName: "kube-api-access-4pftk") pod "167e48bc-c3dd-464f-b4ed-9109ffc6de7d" (UID: "167e48bc-c3dd-464f-b4ed-9109ffc6de7d"). InnerVolumeSpecName "kube-api-access-4pftk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.709230 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-scripts" (OuterVolumeSpecName: "scripts") pod "167e48bc-c3dd-464f-b4ed-9109ffc6de7d" (UID: "167e48bc-c3dd-464f-b4ed-9109ffc6de7d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.727905 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "167e48bc-c3dd-464f-b4ed-9109ffc6de7d" (UID: "167e48bc-c3dd-464f-b4ed-9109ffc6de7d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.737716 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-config-data" (OuterVolumeSpecName: "config-data") pod "167e48bc-c3dd-464f-b4ed-9109ffc6de7d" (UID: "167e48bc-c3dd-464f-b4ed-9109ffc6de7d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.804565 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pftk\" (UniqueName: \"kubernetes.io/projected/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-kube-api-access-4pftk\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.804600 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.804612 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:39 crc kubenswrapper[5014]: I1205 11:10:39.804622 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167e48bc-c3dd-464f-b4ed-9109ffc6de7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.140075 5014 generic.go:334] "Generic (PLEG): container finished" podID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerID="ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363" exitCode=143 Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.140442 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d796a63b-7c8c-450e-8b6f-f2fd239e13c9","Type":"ContainerDied","Data":"ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363"} Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.143164 5014 generic.go:334] "Generic (PLEG): container finished" podID="ec59142e-26a3-473e-9172-4ad744fc8515" containerID="e065f2ee0bab982045a51ed31e48bc0b1b0e0f428ca9a840a8ee4698a9616c5f" exitCode=0 Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.143215 5014 generic.go:334] "Generic (PLEG): container finished" podID="ec59142e-26a3-473e-9172-4ad744fc8515" containerID="75d3ad02d7cb1baee600cf63fd72bb6923d687fd4cd8241e6e41d0835077a7ef" exitCode=143 Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.143249 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec59142e-26a3-473e-9172-4ad744fc8515","Type":"ContainerDied","Data":"e065f2ee0bab982045a51ed31e48bc0b1b0e0f428ca9a840a8ee4698a9616c5f"} Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.143302 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"ec59142e-26a3-473e-9172-4ad744fc8515","Type":"ContainerDied","Data":"75d3ad02d7cb1baee600cf63fd72bb6923d687fd4cd8241e6e41d0835077a7ef"} Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.157172 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bb4zk" event={"ID":"167e48bc-c3dd-464f-b4ed-9109ffc6de7d","Type":"ContainerDied","Data":"3d3de94d66c6129575285f7d32cb81df815feceba21df1b6ddce4a60292ae422"} Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.157211 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d3de94d66c6129575285f7d32cb81df815feceba21df1b6ddce4a60292ae422" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.157303 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bb4zk" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.208067 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 11:10:40 crc kubenswrapper[5014]: E1205 11:10:40.208733 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd11abe8-3b00-430e-bade-62fd4e9047b6" containerName="dnsmasq-dns" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.208817 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd11abe8-3b00-430e-bade-62fd4e9047b6" containerName="dnsmasq-dns" Dec 05 11:10:40 crc kubenswrapper[5014]: E1205 11:10:40.208919 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="167e48bc-c3dd-464f-b4ed-9109ffc6de7d" containerName="nova-cell1-conductor-db-sync" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.208983 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="167e48bc-c3dd-464f-b4ed-9109ffc6de7d" containerName="nova-cell1-conductor-db-sync" Dec 05 11:10:40 crc kubenswrapper[5014]: E1205 11:10:40.209068 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd11abe8-3b00-430e-bade-62fd4e9047b6" containerName="init" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.209129 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd11abe8-3b00-430e-bade-62fd4e9047b6" containerName="init" Dec 05 11:10:40 crc kubenswrapper[5014]: E1205 11:10:40.209205 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf" containerName="nova-manage" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.209295 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf" containerName="nova-manage" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.209589 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd11abe8-3b00-430e-bade-62fd4e9047b6" containerName="dnsmasq-dns" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.209675 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf" containerName="nova-manage" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.209755 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="167e48bc-c3dd-464f-b4ed-9109ffc6de7d" containerName="nova-cell1-conductor-db-sync" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.210675 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.222179 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.229157 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.233393 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.240507 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9cdm\" (UniqueName: \"kubernetes.io/projected/2d40af86-9a47-4de0-aa6f-a0ec696d2c23-kube-api-access-j9cdm\") pod \"nova-cell1-conductor-0\" (UID: \"2d40af86-9a47-4de0-aa6f-a0ec696d2c23\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.240815 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d40af86-9a47-4de0-aa6f-a0ec696d2c23-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2d40af86-9a47-4de0-aa6f-a0ec696d2c23\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.241015 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d40af86-9a47-4de0-aa6f-a0ec696d2c23-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2d40af86-9a47-4de0-aa6f-a0ec696d2c23\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.342334 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwxwl\" (UniqueName: \"kubernetes.io/projected/ec59142e-26a3-473e-9172-4ad744fc8515-kube-api-access-rwxwl\") pod \"ec59142e-26a3-473e-9172-4ad744fc8515\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.342604 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-nova-metadata-tls-certs\") pod \"ec59142e-26a3-473e-9172-4ad744fc8515\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.343130 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec59142e-26a3-473e-9172-4ad744fc8515-logs\") pod \"ec59142e-26a3-473e-9172-4ad744fc8515\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.343237 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-config-data\") pod \"ec59142e-26a3-473e-9172-4ad744fc8515\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.343369 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-combined-ca-bundle\") pod \"ec59142e-26a3-473e-9172-4ad744fc8515\" (UID: \"ec59142e-26a3-473e-9172-4ad744fc8515\") " Dec 
05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.343663 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d40af86-9a47-4de0-aa6f-a0ec696d2c23-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2d40af86-9a47-4de0-aa6f-a0ec696d2c23\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.343419 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec59142e-26a3-473e-9172-4ad744fc8515-logs" (OuterVolumeSpecName: "logs") pod "ec59142e-26a3-473e-9172-4ad744fc8515" (UID: "ec59142e-26a3-473e-9172-4ad744fc8515"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.344610 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9cdm\" (UniqueName: \"kubernetes.io/projected/2d40af86-9a47-4de0-aa6f-a0ec696d2c23-kube-api-access-j9cdm\") pod \"nova-cell1-conductor-0\" (UID: \"2d40af86-9a47-4de0-aa6f-a0ec696d2c23\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.344772 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d40af86-9a47-4de0-aa6f-a0ec696d2c23-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2d40af86-9a47-4de0-aa6f-a0ec696d2c23\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.345591 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec59142e-26a3-473e-9172-4ad744fc8515-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.348264 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d40af86-9a47-4de0-aa6f-a0ec696d2c23-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2d40af86-9a47-4de0-aa6f-a0ec696d2c23\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.349130 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec59142e-26a3-473e-9172-4ad744fc8515-kube-api-access-rwxwl" (OuterVolumeSpecName: "kube-api-access-rwxwl") pod "ec59142e-26a3-473e-9172-4ad744fc8515" (UID: "ec59142e-26a3-473e-9172-4ad744fc8515"). InnerVolumeSpecName "kube-api-access-rwxwl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.351648 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d40af86-9a47-4de0-aa6f-a0ec696d2c23-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2d40af86-9a47-4de0-aa6f-a0ec696d2c23\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.364001 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9cdm\" (UniqueName: \"kubernetes.io/projected/2d40af86-9a47-4de0-aa6f-a0ec696d2c23-kube-api-access-j9cdm\") pod \"nova-cell1-conductor-0\" (UID: \"2d40af86-9a47-4de0-aa6f-a0ec696d2c23\") " pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.378065 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-config-data" (OuterVolumeSpecName: "config-data") pod "ec59142e-26a3-473e-9172-4ad744fc8515" (UID: "ec59142e-26a3-473e-9172-4ad744fc8515"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.381433 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec59142e-26a3-473e-9172-4ad744fc8515" (UID: "ec59142e-26a3-473e-9172-4ad744fc8515"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.401746 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "ec59142e-26a3-473e-9172-4ad744fc8515" (UID: "ec59142e-26a3-473e-9172-4ad744fc8515"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.448002 5014 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.448037 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.448047 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec59142e-26a3-473e-9172-4ad744fc8515-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.448055 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwxwl\" (UniqueName: \"kubernetes.io/projected/ec59142e-26a3-473e-9172-4ad744fc8515-kube-api-access-rwxwl\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.537995 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:40 crc kubenswrapper[5014]: E1205 11:10:40.745849 5014 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 11:10:40 crc kubenswrapper[5014]: E1205 11:10:40.747412 5014 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 11:10:40 crc kubenswrapper[5014]: E1205 11:10:40.750031 5014 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 11:10:40 crc kubenswrapper[5014]: E1205 11:10:40.750073 5014 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c2195caf-693e-4f84-bd70-106a0c927b27" containerName="nova-scheduler-scheduler" Dec 05 11:10:40 crc kubenswrapper[5014]: I1205 11:10:40.996406 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.166472 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2d40af86-9a47-4de0-aa6f-a0ec696d2c23","Type":"ContainerStarted","Data":"b0ce42ba33dca3de8753122bc5d41286fab6f98cb7995426d60e2a1e56fd44ca"} Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.168431 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec59142e-26a3-473e-9172-4ad744fc8515","Type":"ContainerDied","Data":"52541a152e0b47067a92e6d48b5fc43e14b3e3c00488a587e0a54b87dfd326ea"} Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.168497 5014 scope.go:117] "RemoveContainer" containerID="e065f2ee0bab982045a51ed31e48bc0b1b0e0f428ca9a840a8ee4698a9616c5f" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.168548 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.194083 5014 scope.go:117] "RemoveContainer" containerID="75d3ad02d7cb1baee600cf63fd72bb6923d687fd4cd8241e6e41d0835077a7ef" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.218040 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.239384 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.250754 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:41 crc kubenswrapper[5014]: E1205 11:10:41.252027 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec59142e-26a3-473e-9172-4ad744fc8515" containerName="nova-metadata-metadata" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.252107 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec59142e-26a3-473e-9172-4ad744fc8515" containerName="nova-metadata-metadata" Dec 05 11:10:41 crc kubenswrapper[5014]: E1205 11:10:41.252135 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec59142e-26a3-473e-9172-4ad744fc8515" containerName="nova-metadata-log" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.252142 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec59142e-26a3-473e-9172-4ad744fc8515" containerName="nova-metadata-log" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.252415 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec59142e-26a3-473e-9172-4ad744fc8515" containerName="nova-metadata-log" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.252443 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec59142e-26a3-473e-9172-4ad744fc8515" containerName="nova-metadata-metadata" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.253693 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.255934 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.260535 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.262552 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.329836 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec59142e-26a3-473e-9172-4ad744fc8515" path="/var/lib/kubelet/pods/ec59142e-26a3-473e-9172-4ad744fc8515/volumes" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.364233 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30ec1d91-2613-41f0-92b4-2c195597789d-logs\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.364346 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55mdw\" (UniqueName: \"kubernetes.io/projected/30ec1d91-2613-41f0-92b4-2c195597789d-kube-api-access-55mdw\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.364516 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-config-data\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.364554 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.364619 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.466844 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-config-data\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.467146 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.467203 5014 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.467420 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30ec1d91-2613-41f0-92b4-2c195597789d-logs\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.467454 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55mdw\" (UniqueName: \"kubernetes.io/projected/30ec1d91-2613-41f0-92b4-2c195597789d-kube-api-access-55mdw\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.468220 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30ec1d91-2613-41f0-92b4-2c195597789d-logs\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.473132 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.473366 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.475536 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-config-data\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.487788 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55mdw\" (UniqueName: \"kubernetes.io/projected/30ec1d91-2613-41f0-92b4-2c195597789d-kube-api-access-55mdw\") pod \"nova-metadata-0\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") " pod="openstack/nova-metadata-0" Dec 05 11:10:41 crc kubenswrapper[5014]: I1205 11:10:41.580557 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 11:10:42 crc kubenswrapper[5014]: I1205 11:10:42.087763 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 11:10:42 crc kubenswrapper[5014]: W1205 11:10:42.097076 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30ec1d91_2613_41f0_92b4_2c195597789d.slice/crio-f74922d873c1ba080cadea53bd3e1a9928bc0169f47d5b3570769b79248e9ca4 WatchSource:0}: Error finding container f74922d873c1ba080cadea53bd3e1a9928bc0169f47d5b3570769b79248e9ca4: Status 404 returned error can't find the container with id f74922d873c1ba080cadea53bd3e1a9928bc0169f47d5b3570769b79248e9ca4 Dec 05 11:10:42 crc kubenswrapper[5014]: I1205 11:10:42.182683 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"30ec1d91-2613-41f0-92b4-2c195597789d","Type":"ContainerStarted","Data":"f74922d873c1ba080cadea53bd3e1a9928bc0169f47d5b3570769b79248e9ca4"} Dec 05 11:10:42 crc kubenswrapper[5014]: I1205 11:10:42.186104 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2d40af86-9a47-4de0-aa6f-a0ec696d2c23","Type":"ContainerStarted","Data":"20b139924ed23e17578d80391ce7521331c621360da5b679fa6d7f42d5a8bd30"} Dec 05 11:10:42 crc kubenswrapper[5014]: I1205 11:10:42.186285 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 11:10:42 crc kubenswrapper[5014]: I1205 11:10:42.205249 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.205234028 podStartE2EDuration="2.205234028s" podCreationTimestamp="2025-12-05 11:10:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:42.201160438 +0000 UTC m=+1369.149278142" watchObservedRunningTime="2025-12-05 11:10:42.205234028 +0000 UTC m=+1369.153351732" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.176171 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.203046 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"30ec1d91-2613-41f0-92b4-2c195597789d","Type":"ContainerStarted","Data":"2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f"} Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.203090 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"30ec1d91-2613-41f0-92b4-2c195597789d","Type":"ContainerStarted","Data":"c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32"} Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.205333 5014 generic.go:334] "Generic (PLEG): container finished" podID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerID="0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67" exitCode=0 Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.205344 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d796a63b-7c8c-450e-8b6f-f2fd239e13c9","Type":"ContainerDied","Data":"0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67"} Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.205403 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d796a63b-7c8c-450e-8b6f-f2fd239e13c9","Type":"ContainerDied","Data":"171e420b1aad59286851b6d34b6e8ba56be471c196c26527266b23dbcdbd988c"} Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.205753 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.205878 5014 scope.go:117] "RemoveContainer" containerID="0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.223753 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dntfp\" (UniqueName: \"kubernetes.io/projected/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-kube-api-access-dntfp\") pod \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.223849 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-combined-ca-bundle\") pod \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.223906 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-logs\") pod \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.223930 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-config-data\") pod \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\" (UID: \"d796a63b-7c8c-450e-8b6f-f2fd239e13c9\") " Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.225655 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-logs" (OuterVolumeSpecName: "logs") pod "d796a63b-7c8c-450e-8b6f-f2fd239e13c9" (UID: 
"d796a63b-7c8c-450e-8b6f-f2fd239e13c9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.227379 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.227357728 podStartE2EDuration="2.227357728s" podCreationTimestamp="2025-12-05 11:10:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:43.221501395 +0000 UTC m=+1370.169619119" watchObservedRunningTime="2025-12-05 11:10:43.227357728 +0000 UTC m=+1370.175475432" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.232865 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-kube-api-access-dntfp" (OuterVolumeSpecName: "kube-api-access-dntfp") pod "d796a63b-7c8c-450e-8b6f-f2fd239e13c9" (UID: "d796a63b-7c8c-450e-8b6f-f2fd239e13c9"). InnerVolumeSpecName "kube-api-access-dntfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.239084 5014 scope.go:117] "RemoveContainer" containerID="ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.264875 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-config-data" (OuterVolumeSpecName: "config-data") pod "d796a63b-7c8c-450e-8b6f-f2fd239e13c9" (UID: "d796a63b-7c8c-450e-8b6f-f2fd239e13c9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.268556 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d796a63b-7c8c-450e-8b6f-f2fd239e13c9" (UID: "d796a63b-7c8c-450e-8b6f-f2fd239e13c9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.286049 5014 scope.go:117] "RemoveContainer" containerID="0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67" Dec 05 11:10:43 crc kubenswrapper[5014]: E1205 11:10:43.286899 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67\": container with ID starting with 0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67 not found: ID does not exist" containerID="0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.286948 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67"} err="failed to get container status \"0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67\": rpc error: code = NotFound desc = could not find container \"0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67\": container with ID starting with 0791f82849af9742bf7c6ee77a056171e58815bf718e2d2cd2096c1e773d3b67 not found: ID does not exist" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.286980 5014 scope.go:117] "RemoveContainer" containerID="ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363" Dec 05 11:10:43 crc kubenswrapper[5014]: E1205 11:10:43.287495 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363\": container with ID starting with ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363 not found: ID does not exist" containerID="ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.287516 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363"} err="failed to get container status \"ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363\": rpc error: code = NotFound desc = could not find container \"ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363\": container with ID starting with ec22957b89389521cf57859e72fea9eb6ed6f8e2a3a536ea5a0e0b80b429a363 not found: ID does not exist" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.328377 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dntfp\" (UniqueName: \"kubernetes.io/projected/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-kube-api-access-dntfp\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.328407 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.328420 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.328429 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d796a63b-7c8c-450e-8b6f-f2fd239e13c9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.590420 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.595211 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.606854 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 11:10:43 crc kubenswrapper[5014]: E1205 11:10:43.607439 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-log" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.607457 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-log" Dec 05 11:10:43 crc kubenswrapper[5014]: E1205 11:10:43.607498 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-api" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.607505 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-api" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.607745 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-api" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.607767 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" containerName="nova-api-log" Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.609031 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.612003 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.617767 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.641956 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-config-data\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.642013 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gl8v\" (UniqueName: \"kubernetes.io/projected/1cff6892-6841-4b91-bfb9-f015977f4023-kube-api-access-9gl8v\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.642100 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cff6892-6841-4b91-bfb9-f015977f4023-logs\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.642885 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.746537 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.746661 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-config-data\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.746949 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gl8v\" (UniqueName: \"kubernetes.io/projected/1cff6892-6841-4b91-bfb9-f015977f4023-kube-api-access-9gl8v\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.746982 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cff6892-6841-4b91-bfb9-f015977f4023-logs\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.747398 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cff6892-6841-4b91-bfb9-f015977f4023-logs\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.753136 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-config-data\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.758448 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.763963 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gl8v\" (UniqueName: \"kubernetes.io/projected/1cff6892-6841-4b91-bfb9-f015977f4023-kube-api-access-9gl8v\") pod \"nova-api-0\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") " pod="openstack/nova-api-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.920188 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.950337 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-config-data\") pod \"c2195caf-693e-4f84-bd70-106a0c927b27\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") "
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.950503 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmw4v\" (UniqueName: \"kubernetes.io/projected/c2195caf-693e-4f84-bd70-106a0c927b27-kube-api-access-fmw4v\") pod \"c2195caf-693e-4f84-bd70-106a0c927b27\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") "
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.950589 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-combined-ca-bundle\") pod \"c2195caf-693e-4f84-bd70-106a0c927b27\" (UID: \"c2195caf-693e-4f84-bd70-106a0c927b27\") "
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.954487 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2195caf-693e-4f84-bd70-106a0c927b27-kube-api-access-fmw4v" (OuterVolumeSpecName: "kube-api-access-fmw4v") pod "c2195caf-693e-4f84-bd70-106a0c927b27" (UID: "c2195caf-693e-4f84-bd70-106a0c927b27"). InnerVolumeSpecName "kube-api-access-fmw4v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.980392 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-config-data" (OuterVolumeSpecName: "config-data") pod "c2195caf-693e-4f84-bd70-106a0c927b27" (UID: "c2195caf-693e-4f84-bd70-106a0c927b27"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:10:43 crc kubenswrapper[5014]: I1205 11:10:43.983445 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2195caf-693e-4f84-bd70-106a0c927b27" (UID: "c2195caf-693e-4f84-bd70-106a0c927b27"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.005092 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.053135 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.053171 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmw4v\" (UniqueName: \"kubernetes.io/projected/c2195caf-693e-4f84-bd70-106a0c927b27-kube-api-access-fmw4v\") on node \"crc\" DevicePath \"\""
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.053181 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2195caf-693e-4f84-bd70-106a0c927b27-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.234057 5014 generic.go:334] "Generic (PLEG): container finished" podID="c2195caf-693e-4f84-bd70-106a0c927b27" containerID="cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56" exitCode=0
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.234111 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.234187 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2195caf-693e-4f84-bd70-106a0c927b27","Type":"ContainerDied","Data":"cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56"}
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.234215 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2195caf-693e-4f84-bd70-106a0c927b27","Type":"ContainerDied","Data":"82aebf67f88aadaa7d176d2f6eb34cb584a515806630b73bfefec9745e820166"}
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.234235 5014 scope.go:117] "RemoveContainer" containerID="cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.284116 5014 scope.go:117] "RemoveContainer" containerID="cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56"
Dec 05 11:10:44 crc kubenswrapper[5014]: E1205 11:10:44.285511 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56\": container with ID starting with cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56 not found: ID does not exist" containerID="cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.285565 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56"} err="failed to get container status \"cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56\": rpc error: code = NotFound desc = could not find container \"cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56\": container with ID starting with cfaaf1adacb4f84b64136653c04c78735a71efa17f7a869f113d79536ca0ea56 not found: ID does not exist"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.292851 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.306682 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.314965 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:10:44 crc kubenswrapper[5014]: E1205 11:10:44.315525 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2195caf-693e-4f84-bd70-106a0c927b27" containerName="nova-scheduler-scheduler"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.315544 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2195caf-693e-4f84-bd70-106a0c927b27" containerName="nova-scheduler-scheduler"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.315775 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2195caf-693e-4f84-bd70-106a0c927b27" containerName="nova-scheduler-scheduler"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.316695 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.319691 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.323479 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.366580 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-config-data\") pod \"nova-scheduler-0\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") " pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.366692 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tj8z\" (UniqueName: \"kubernetes.io/projected/604552d0-1f4f-4ae5-b515-d95071b12f38-kube-api-access-7tj8z\") pod \"nova-scheduler-0\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") " pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.366731 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") " pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.470537 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tj8z\" (UniqueName: \"kubernetes.io/projected/604552d0-1f4f-4ae5-b515-d95071b12f38-kube-api-access-7tj8z\") pod \"nova-scheduler-0\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") " pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.470609 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") " pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.470723 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-config-data\") pod \"nova-scheduler-0\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") " pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.475173 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-config-data\") pod \"nova-scheduler-0\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") " pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.475675 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.476233 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") " pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.487323 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tj8z\" (UniqueName: \"kubernetes.io/projected/604552d0-1f4f-4ae5-b515-d95071b12f38-kube-api-access-7tj8z\") pod \"nova-scheduler-0\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") " pod="openstack/nova-scheduler-0"
Dec 05 11:10:44 crc kubenswrapper[5014]: I1205 11:10:44.636786 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 11:10:45 crc kubenswrapper[5014]: I1205 11:10:45.185734 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:10:45 crc kubenswrapper[5014]: W1205 11:10:45.188202 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod604552d0_1f4f_4ae5_b515_d95071b12f38.slice/crio-035154dbe6fa9ccd9ae3180dd89519edaabad1ff455bf7c8aa9659ffeb1fa4c7 WatchSource:0}: Error finding container 035154dbe6fa9ccd9ae3180dd89519edaabad1ff455bf7c8aa9659ffeb1fa4c7: Status 404 returned error can't find the container with id 035154dbe6fa9ccd9ae3180dd89519edaabad1ff455bf7c8aa9659ffeb1fa4c7
Dec 05 11:10:45 crc kubenswrapper[5014]: I1205 11:10:45.250878 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1cff6892-6841-4b91-bfb9-f015977f4023","Type":"ContainerStarted","Data":"55fca510049101973f58fbd464884baa6b5c1c72c57984ac6341205af447cd84"}
Dec 05 11:10:45 crc kubenswrapper[5014]: I1205 11:10:45.250923 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1cff6892-6841-4b91-bfb9-f015977f4023","Type":"ContainerStarted","Data":"175de7d209c08ec56ae4939a0d67945be6eb32231419d07912581dc44fabb52f"}
Dec 05 11:10:45 crc kubenswrapper[5014]: I1205 11:10:45.250932 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1cff6892-6841-4b91-bfb9-f015977f4023","Type":"ContainerStarted","Data":"1157ebce88c2b427a1d673df8de2fa90ad6f27dc343e005a08ee257a3a297ccd"}
Dec 05 11:10:45 crc kubenswrapper[5014]: I1205 11:10:45.254175 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"604552d0-1f4f-4ae5-b515-d95071b12f38","Type":"ContainerStarted","Data":"035154dbe6fa9ccd9ae3180dd89519edaabad1ff455bf7c8aa9659ffeb1fa4c7"}
Dec 05 11:10:45 crc kubenswrapper[5014]: I1205 11:10:45.276550 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.276528319 podStartE2EDuration="2.276528319s" podCreationTimestamp="2025-12-05 11:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:45.26959097 +0000 UTC m=+1372.217708694" watchObservedRunningTime="2025-12-05 11:10:45.276528319 +0000 UTC m=+1372.224646023"
Dec 05 11:10:45 crc kubenswrapper[5014]: I1205 11:10:45.331303 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2195caf-693e-4f84-bd70-106a0c927b27" path="/var/lib/kubelet/pods/c2195caf-693e-4f84-bd70-106a0c927b27/volumes"
Dec 05 11:10:45 crc kubenswrapper[5014]: I1205 11:10:45.332239 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d796a63b-7c8c-450e-8b6f-f2fd239e13c9" path="/var/lib/kubelet/pods/d796a63b-7c8c-450e-8b6f-f2fd239e13c9/volumes"
Dec 05 11:10:46 crc kubenswrapper[5014]: I1205 11:10:46.266770 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"604552d0-1f4f-4ae5-b515-d95071b12f38","Type":"ContainerStarted","Data":"561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249"}
Dec 05 11:10:46 crc kubenswrapper[5014]: I1205 11:10:46.284317 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.284296179 podStartE2EDuration="2.284296179s" podCreationTimestamp="2025-12-05 11:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:10:46.282241409 +0000 UTC m=+1373.230359143" watchObservedRunningTime="2025-12-05 11:10:46.284296179 +0000 UTC m=+1373.232413893"
Dec 05 11:10:46 crc kubenswrapper[5014]: I1205 11:10:46.582038 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 11:10:46 crc kubenswrapper[5014]: I1205 11:10:46.582111 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 11:10:49 crc kubenswrapper[5014]: I1205 11:10:49.637717 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 05 11:10:50 crc kubenswrapper[5014]: I1205 11:10:50.566603 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Dec 05 11:10:51 crc kubenswrapper[5014]: I1205 11:10:51.581394 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 11:10:51 crc kubenswrapper[5014]: I1205 11:10:51.581745 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 11:10:52 crc kubenswrapper[5014]: I1205 11:10:52.596536 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:10:52 crc kubenswrapper[5014]: I1205 11:10:52.596566 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:10:54 crc kubenswrapper[5014]: I1205 11:10:54.006299 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 11:10:54 crc kubenswrapper[5014]: I1205 11:10:54.006671 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 11:10:54 crc kubenswrapper[5014]: I1205 11:10:54.638146 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 05 11:10:54 crc kubenswrapper[5014]: I1205 11:10:54.795091 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 05 11:10:55 crc kubenswrapper[5014]: I1205 11:10:55.088531 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:10:55 crc kubenswrapper[5014]: I1205 11:10:55.088581 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:10:55 crc kubenswrapper[5014]: I1205 11:10:55.378391 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 05 11:10:59 crc kubenswrapper[5014]: I1205 11:10:59.363529 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 05 11:11:01 crc kubenswrapper[5014]: I1205 11:11:01.585799 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 11:11:01 crc kubenswrapper[5014]: I1205 11:11:01.589599 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 11:11:01 crc kubenswrapper[5014]: I1205 11:11:01.596427 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.405479 5014 generic.go:334] "Generic (PLEG): container finished" podID="c1290dfc-17fd-46a1-8f34-d1e338523945" containerID="a270aa9e58e6d1e81130adf28324cb68cdf67724cf4377ac0f05b5314e1185c3" exitCode=137
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.405552 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c1290dfc-17fd-46a1-8f34-d1e338523945","Type":"ContainerDied","Data":"a270aa9e58e6d1e81130adf28324cb68cdf67724cf4377ac0f05b5314e1185c3"}
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.406044 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c1290dfc-17fd-46a1-8f34-d1e338523945","Type":"ContainerDied","Data":"f7afcb1a51def3e8c451160769f6f47017890a0c6b3f58336ab20be5e78ebec3"}
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.406150 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7afcb1a51def3e8c451160769f6f47017890a0c6b3f58336ab20be5e78ebec3"
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.428010 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.445340 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.523604 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-config-data\") pod \"c1290dfc-17fd-46a1-8f34-d1e338523945\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") "
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.523709 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-combined-ca-bundle\") pod \"c1290dfc-17fd-46a1-8f34-d1e338523945\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") "
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.523765 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldwf9\" (UniqueName: \"kubernetes.io/projected/c1290dfc-17fd-46a1-8f34-d1e338523945-kube-api-access-ldwf9\") pod \"c1290dfc-17fd-46a1-8f34-d1e338523945\" (UID: \"c1290dfc-17fd-46a1-8f34-d1e338523945\") "
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.531145 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1290dfc-17fd-46a1-8f34-d1e338523945-kube-api-access-ldwf9" (OuterVolumeSpecName: "kube-api-access-ldwf9") pod "c1290dfc-17fd-46a1-8f34-d1e338523945" (UID: "c1290dfc-17fd-46a1-8f34-d1e338523945"). InnerVolumeSpecName "kube-api-access-ldwf9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.615504 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c1290dfc-17fd-46a1-8f34-d1e338523945" (UID: "c1290dfc-17fd-46a1-8f34-d1e338523945"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.627433 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.627699 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldwf9\" (UniqueName: \"kubernetes.io/projected/c1290dfc-17fd-46a1-8f34-d1e338523945-kube-api-access-ldwf9\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.667413 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-config-data" (OuterVolumeSpecName: "config-data") pod "c1290dfc-17fd-46a1-8f34-d1e338523945" (UID: "c1290dfc-17fd-46a1-8f34-d1e338523945"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.729870 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1290dfc-17fd-46a1-8f34-d1e338523945-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.971810 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 11:11:02 crc kubenswrapper[5014]: I1205 11:11:02.972232 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="53c31740-5b9c-402e-bccb-929d64de7669" containerName="kube-state-metrics" containerID="cri-o://fa5598192edba6c6f74c8dec204a252c425ede2d7006b6d170640d7400d6273d" gracePeriod=30
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.417686 5014 generic.go:334] "Generic (PLEG): container finished" podID="53c31740-5b9c-402e-bccb-929d64de7669" containerID="fa5598192edba6c6f74c8dec204a252c425ede2d7006b6d170640d7400d6273d" exitCode=2
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.417873 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"53c31740-5b9c-402e-bccb-929d64de7669","Type":"ContainerDied","Data":"fa5598192edba6c6f74c8dec204a252c425ede2d7006b6d170640d7400d6273d"}
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.418464 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"53c31740-5b9c-402e-bccb-929d64de7669","Type":"ContainerDied","Data":"4d5c7c13fec2bf8f1613e8f2d4d85addfab2294cc72e1a70c94a72c8d0a5f0c0"}
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.418497 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d5c7c13fec2bf8f1613e8f2d4d85addfab2294cc72e1a70c94a72c8d0a5f0c0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.418576 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.448210 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.476704 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.497154 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.509126 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 11:11:03 crc kubenswrapper[5014]: E1205 11:11:03.509868 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53c31740-5b9c-402e-bccb-929d64de7669" containerName="kube-state-metrics"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.509895 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="53c31740-5b9c-402e-bccb-929d64de7669" containerName="kube-state-metrics"
Dec 05 11:11:03 crc kubenswrapper[5014]: E1205 11:11:03.509931 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1290dfc-17fd-46a1-8f34-d1e338523945" containerName="nova-cell1-novncproxy-novncproxy"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.509941 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1290dfc-17fd-46a1-8f34-d1e338523945" containerName="nova-cell1-novncproxy-novncproxy"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.510183 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1290dfc-17fd-46a1-8f34-d1e338523945" containerName="nova-cell1-novncproxy-novncproxy"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.510199 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="53c31740-5b9c-402e-bccb-929d64de7669" containerName="kube-state-metrics"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.511090 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.514883 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.514905 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.515216 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.525097 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.548804 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcvxl\" (UniqueName: \"kubernetes.io/projected/53c31740-5b9c-402e-bccb-929d64de7669-kube-api-access-qcvxl\") pod \"53c31740-5b9c-402e-bccb-929d64de7669\" (UID: \"53c31740-5b9c-402e-bccb-929d64de7669\") "
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.548924 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.548953 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tchb\" (UniqueName: \"kubernetes.io/projected/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-kube-api-access-9tchb\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.548992 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.549087 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.549119 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.553821 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53c31740-5b9c-402e-bccb-929d64de7669-kube-api-access-qcvxl" (OuterVolumeSpecName: "kube-api-access-qcvxl") pod "53c31740-5b9c-402e-bccb-929d64de7669" (UID: "53c31740-5b9c-402e-bccb-929d64de7669"). InnerVolumeSpecName "kube-api-access-qcvxl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.650410 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.650474 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.650496 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.650516 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tchb\" (UniqueName: \"kubernetes.io/projected/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-kube-api-access-9tchb\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.650572 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.650676 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcvxl\" (UniqueName: \"kubernetes.io/projected/53c31740-5b9c-402e-bccb-929d64de7669-kube-api-access-qcvxl\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.655207 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.655764 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.656687 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.656882 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.669159 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tchb\" (UniqueName: \"kubernetes.io/projected/0d4f49a8-b03d-40a0-b688-1e47556fe7b0-kube-api-access-9tchb\") pod \"nova-cell1-novncproxy-0\" (UID: \"0d4f49a8-b03d-40a0-b688-1e47556fe7b0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:03 crc kubenswrapper[5014]: I1205 11:11:03.836151 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.010938 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.012237 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.012401 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.033033 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.292669 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.428056 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.428055 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0d4f49a8-b03d-40a0-b688-1e47556fe7b0","Type":"ContainerStarted","Data":"50b5816e15a4f25a8ac123aea3d5743c364a85f913b0dd611fc3d788ce439bcb"}
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.428414 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.438050 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.477743 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.494399 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.509723 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.511304 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.514907 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.515495 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.525612 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.619347 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"]
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.621720 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.647421 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"]
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.673262 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/732c79b1-258d-4426-9adf-3019d0935a81-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.673382 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/732c79b1-258d-4426-9adf-3019d0935a81-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.673430 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/732c79b1-258d-4426-9adf-3019d0935a81-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.673461 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpbms\" (UniqueName: \"kubernetes.io/projected/732c79b1-258d-4426-9adf-3019d0935a81-kube-api-access-fpbms\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.778029 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-config\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.778084 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/732c79b1-258d-4426-9adf-3019d0935a81-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.778229 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpbms\" (UniqueName: \"kubernetes.io/projected/732c79b1-258d-4426-9adf-3019d0935a81-kube-api-access-fpbms\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.778990 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.779059 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkclt\" (UniqueName: \"kubernetes.io/projected/5e9b17e6-f16e-4370-9278-61584a2b96a4-kube-api-access-jkclt\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.779115 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/732c79b1-258d-4426-9adf-3019d0935a81-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.779156 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.779192 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.779229 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.779262 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/732c79b1-258d-4426-9adf-3019d0935a81-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.787626 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/732c79b1-258d-4426-9adf-3019d0935a81-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.800003 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/732c79b1-258d-4426-9adf-3019d0935a81-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.800371 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/732c79b1-258d-4426-9adf-3019d0935a81-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.805156 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpbms\" (UniqueName: \"kubernetes.io/projected/732c79b1-258d-4426-9adf-3019d0935a81-kube-api-access-fpbms\") pod \"kube-state-metrics-0\" (UID: \"732c79b1-258d-4426-9adf-3019d0935a81\") " pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.832195 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.889641 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkclt\" (UniqueName: \"kubernetes.io/projected/5e9b17e6-f16e-4370-9278-61584a2b96a4-kube-api-access-jkclt\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.889767 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.889812 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.889863 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.889927 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-config\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.890004 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.891002 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.891660 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.892769 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.892899 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.893538 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-config\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.916876 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkclt\" (UniqueName: \"kubernetes.io/projected/5e9b17e6-f16e-4370-9278-61584a2b96a4-kube-api-access-jkclt\") pod \"dnsmasq-dns-5c7b6c5df9-nv7fd\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:04 crc kubenswrapper[5014]: I1205 11:11:04.951922 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.330777 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53c31740-5b9c-402e-bccb-929d64de7669" path="/var/lib/kubelet/pods/53c31740-5b9c-402e-bccb-929d64de7669/volumes"
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.331921 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1290dfc-17fd-46a1-8f34-d1e338523945" path="/var/lib/kubelet/pods/c1290dfc-17fd-46a1-8f34-d1e338523945/volumes"
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.401150 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.440955 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.478557 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0d4f49a8-b03d-40a0-b688-1e47556fe7b0","Type":"ContainerStarted","Data":"8c015fe56ae29caa7bac18a93fa5c007b681595111423f4d2ddfbaaa7384444c"}
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.497052 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"732c79b1-258d-4426-9adf-3019d0935a81","Type":"ContainerStarted","Data":"f6d9799b36411907010fdbac68fa7e725ee7c0370834c9fe537a70140da015a0"}
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.531729 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.531703551 podStartE2EDuration="2.531703551s" podCreationTimestamp="2025-12-05 11:11:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:05.512537354 +0000 UTC m=+1392.460655078" watchObservedRunningTime="2025-12-05 11:11:05.531703551 +0000 UTC m=+1392.479821255"
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.566585 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"]
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.904256 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.904944 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="proxy-httpd" containerID="cri-o://1f4d15bec7aa3e7349d8ae6ce62b966491b4a49d2794d7bf43d0022ab23cb6a2" gracePeriod=30
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.904985 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="sg-core" containerID="cri-o://5880170552fc383dd4b065a8d7328729e9a70eee657a91e8f42d0ac20d78abd9" gracePeriod=30
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.905018 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="ceilometer-notification-agent" containerID="cri-o://dca2fc7121ab0e756b20425edd88c2e47744b91c73aadc481d3f56f8fa58f61b" gracePeriod=30
Dec 05 11:11:05 crc kubenswrapper[5014]: I1205 11:11:05.905107 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="ceilometer-central-agent" containerID="cri-o://f43a46717ad64328fd39abfe0a782181090756536ec76cf8f401f628a6b5c485" gracePeriod=30
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.506102 5014 generic.go:334] "Generic (PLEG): container finished" podID="5e9b17e6-f16e-4370-9278-61584a2b96a4" containerID="ecf2ff395c965b23064519d1deed5476eb4476d8a9765de92e733830dac78e76" exitCode=0
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.506253 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" event={"ID":"5e9b17e6-f16e-4370-9278-61584a2b96a4","Type":"ContainerDied","Data":"ecf2ff395c965b23064519d1deed5476eb4476d8a9765de92e733830dac78e76"}
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.506484 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" event={"ID":"5e9b17e6-f16e-4370-9278-61584a2b96a4","Type":"ContainerStarted","Data":"e306871ffe6fd2ae9cf686f8df89cc512f0e0d2942274c97a3dd43d9fd8fd63f"}
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.518085 5014 generic.go:334] "Generic (PLEG): container finished" podID="743f670f-108f-446c-bee9-fbd36f5cf074" containerID="1f4d15bec7aa3e7349d8ae6ce62b966491b4a49d2794d7bf43d0022ab23cb6a2" exitCode=0
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.518117 5014 generic.go:334] "Generic (PLEG): container finished" podID="743f670f-108f-446c-bee9-fbd36f5cf074" containerID="5880170552fc383dd4b065a8d7328729e9a70eee657a91e8f42d0ac20d78abd9" exitCode=2
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.518127 5014 generic.go:334] "Generic (PLEG): container finished" podID="743f670f-108f-446c-bee9-fbd36f5cf074" containerID="f43a46717ad64328fd39abfe0a782181090756536ec76cf8f401f628a6b5c485" exitCode=0
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.518183 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerDied","Data":"1f4d15bec7aa3e7349d8ae6ce62b966491b4a49d2794d7bf43d0022ab23cb6a2"}
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.518218 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerDied","Data":"5880170552fc383dd4b065a8d7328729e9a70eee657a91e8f42d0ac20d78abd9"}
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.518229 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerDied","Data":"f43a46717ad64328fd39abfe0a782181090756536ec76cf8f401f628a6b5c485"}
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.521940 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"732c79b1-258d-4426-9adf-3019d0935a81","Type":"ContainerStarted","Data":"bb14d77df4bc72ccec6367a750d6aae1d431e787d7a75e691ca0e9d7d3b659ae"}
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.521972 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 05 11:11:06 crc kubenswrapper[5014]: I1205 11:11:06.558378 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.814255979 podStartE2EDuration="2.558342363s" podCreationTimestamp="2025-12-05 11:11:04 +0000 UTC" firstStartedPulling="2025-12-05 11:11:05.440676249 +0000 UTC m=+1392.388793953" lastFinishedPulling="2025-12-05 11:11:06.184762633 +0000 UTC m=+1393.132880337" observedRunningTime="2025-12-05 11:11:06.550237805 +0000 UTC m=+1393.498355509" watchObservedRunningTime="2025-12-05 11:11:06.558342363 +0000 UTC m=+1393.506460087"
Dec 05 11:11:07 crc kubenswrapper[5014]: I1205 11:11:07.186433 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:11:07 crc kubenswrapper[5014]: I1205 11:11:07.531614 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" event={"ID":"5e9b17e6-f16e-4370-9278-61584a2b96a4","Type":"ContainerStarted","Data":"0eedc8b3521cfb61af67c39600b8661a2045966cbe3d4ccb6a8d36220c715b6a"}
Dec 05 11:11:07 crc kubenswrapper[5014]: I1205 11:11:07.531774 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-log" containerID="cri-o://175de7d209c08ec56ae4939a0d67945be6eb32231419d07912581dc44fabb52f" gracePeriod=30
Dec 05 11:11:07 crc kubenswrapper[5014]: I1205 11:11:07.531858 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-api" containerID="cri-o://55fca510049101973f58fbd464884baa6b5c1c72c57984ac6341205af447cd84" gracePeriod=30
Dec 05 11:11:07 crc kubenswrapper[5014]: I1205 11:11:07.559636 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" podStartSLOduration=3.559610434 podStartE2EDuration="3.559610434s" podCreationTimestamp="2025-12-05 11:11:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:07.551800133 +0000 UTC m=+1394.499917857" watchObservedRunningTime="2025-12-05 11:11:07.559610434 +0000 UTC m=+1394.507728138"
Dec 05 11:11:08 crc kubenswrapper[5014]: I1205 11:11:08.543540 5014 generic.go:334] "Generic (PLEG): container finished" podID="1cff6892-6841-4b91-bfb9-f015977f4023" containerID="175de7d209c08ec56ae4939a0d67945be6eb32231419d07912581dc44fabb52f" exitCode=143
Dec 05 11:11:08 crc kubenswrapper[5014]: I1205 11:11:08.543625 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1cff6892-6841-4b91-bfb9-f015977f4023","Type":"ContainerDied","Data":"175de7d209c08ec56ae4939a0d67945be6eb32231419d07912581dc44fabb52f"}
Dec 05 11:11:08 crc kubenswrapper[5014]: I1205 11:11:08.543902 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"
Dec 05 11:11:08 crc kubenswrapper[5014]: I1205 11:11:08.836573 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.573462 5014 generic.go:334] "Generic (PLEG): container finished" podID="743f670f-108f-446c-bee9-fbd36f5cf074" containerID="dca2fc7121ab0e756b20425edd88c2e47744b91c73aadc481d3f56f8fa58f61b" exitCode=0
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.573548 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerDied","Data":"dca2fc7121ab0e756b20425edd88c2e47744b91c73aadc481d3f56f8fa58f61b"}
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.577107 5014 generic.go:334] "Generic (PLEG): container finished" podID="1cff6892-6841-4b91-bfb9-f015977f4023" containerID="55fca510049101973f58fbd464884baa6b5c1c72c57984ac6341205af447cd84" exitCode=0
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.577145 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1cff6892-6841-4b91-bfb9-f015977f4023","Type":"ContainerDied","Data":"55fca510049101973f58fbd464884baa6b5c1c72c57984ac6341205af447cd84"}
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.712027 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.721785 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900582 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-config-data\") pod \"743f670f-108f-446c-bee9-fbd36f5cf074\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900643 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-scripts\") pod \"743f670f-108f-446c-bee9-fbd36f5cf074\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900691 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-run-httpd\") pod \"743f670f-108f-446c-bee9-fbd36f5cf074\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900725 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cff6892-6841-4b91-bfb9-f015977f4023-logs\") pod \"1cff6892-6841-4b91-bfb9-f015977f4023\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900757 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-combined-ca-bundle\") pod \"1cff6892-6841-4b91-bfb9-f015977f4023\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900816 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-combined-ca-bundle\") pod \"743f670f-108f-446c-bee9-fbd36f5cf074\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900845 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-sg-core-conf-yaml\") pod \"743f670f-108f-446c-bee9-fbd36f5cf074\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900877 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-config-data\") pod \"1cff6892-6841-4b91-bfb9-f015977f4023\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900938 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-log-httpd\") pod \"743f670f-108f-446c-bee9-fbd36f5cf074\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.900976 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gl8v\" (UniqueName: \"kubernetes.io/projected/1cff6892-6841-4b91-bfb9-f015977f4023-kube-api-access-9gl8v\") pod \"1cff6892-6841-4b91-bfb9-f015977f4023\" (UID: \"1cff6892-6841-4b91-bfb9-f015977f4023\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.901016 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddb4h\" (UniqueName: \"kubernetes.io/projected/743f670f-108f-446c-bee9-fbd36f5cf074-kube-api-access-ddb4h\") pod \"743f670f-108f-446c-bee9-fbd36f5cf074\" (UID: \"743f670f-108f-446c-bee9-fbd36f5cf074\") "
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.901410 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cff6892-6841-4b91-bfb9-f015977f4023-logs" (OuterVolumeSpecName: "logs") pod "1cff6892-6841-4b91-bfb9-f015977f4023" (UID: "1cff6892-6841-4b91-bfb9-f015977f4023"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.902042 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "743f670f-108f-446c-bee9-fbd36f5cf074" (UID: "743f670f-108f-446c-bee9-fbd36f5cf074"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.902482 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "743f670f-108f-446c-bee9-fbd36f5cf074" (UID: "743f670f-108f-446c-bee9-fbd36f5cf074"). InnerVolumeSpecName "log-httpd".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.907849 5014 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.907904 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cff6892-6841-4b91-bfb9-f015977f4023-logs\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.907917 5014 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743f670f-108f-446c-bee9-fbd36f5cf074-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.909710 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/743f670f-108f-446c-bee9-fbd36f5cf074-kube-api-access-ddb4h" (OuterVolumeSpecName: "kube-api-access-ddb4h") pod "743f670f-108f-446c-bee9-fbd36f5cf074" (UID: "743f670f-108f-446c-bee9-fbd36f5cf074"). InnerVolumeSpecName "kube-api-access-ddb4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.923065 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-scripts" (OuterVolumeSpecName: "scripts") pod "743f670f-108f-446c-bee9-fbd36f5cf074" (UID: "743f670f-108f-446c-bee9-fbd36f5cf074"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.940052 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "743f670f-108f-446c-bee9-fbd36f5cf074" (UID: "743f670f-108f-446c-bee9-fbd36f5cf074"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.943766 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-config-data" (OuterVolumeSpecName: "config-data") pod "1cff6892-6841-4b91-bfb9-f015977f4023" (UID: "1cff6892-6841-4b91-bfb9-f015977f4023"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.944144 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1cff6892-6841-4b91-bfb9-f015977f4023" (UID: "1cff6892-6841-4b91-bfb9-f015977f4023"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:11 crc kubenswrapper[5014]: I1205 11:11:11.957824 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cff6892-6841-4b91-bfb9-f015977f4023-kube-api-access-9gl8v" (OuterVolumeSpecName: "kube-api-access-9gl8v") pod "1cff6892-6841-4b91-bfb9-f015977f4023" (UID: "1cff6892-6841-4b91-bfb9-f015977f4023"). InnerVolumeSpecName "kube-api-access-9gl8v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.010023 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gl8v\" (UniqueName: \"kubernetes.io/projected/1cff6892-6841-4b91-bfb9-f015977f4023-kube-api-access-9gl8v\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.010069 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddb4h\" (UniqueName: \"kubernetes.io/projected/743f670f-108f-446c-bee9-fbd36f5cf074-kube-api-access-ddb4h\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.010080 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.010090 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.010098 5014 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.010108 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cff6892-6841-4b91-bfb9-f015977f4023-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.010260 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "743f670f-108f-446c-bee9-fbd36f5cf074" (UID: "743f670f-108f-446c-bee9-fbd36f5cf074"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.037653 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-config-data" (OuterVolumeSpecName: "config-data") pod "743f670f-108f-446c-bee9-fbd36f5cf074" (UID: "743f670f-108f-446c-bee9-fbd36f5cf074"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.112244 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.112301 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743f670f-108f-446c-bee9-fbd36f5cf074-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.599498 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743f670f-108f-446c-bee9-fbd36f5cf074","Type":"ContainerDied","Data":"1efc1f451480891b1e00fbe806358a8913381fcc88ff603ae9a874939f686647"} Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.600355 5014 scope.go:117] "RemoveContainer" containerID="1f4d15bec7aa3e7349d8ae6ce62b966491b4a49d2794d7bf43d0022ab23cb6a2" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.599542 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.606495 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1cff6892-6841-4b91-bfb9-f015977f4023","Type":"ContainerDied","Data":"1157ebce88c2b427a1d673df8de2fa90ad6f27dc343e005a08ee257a3a297ccd"} Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.610150 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.651552 5014 scope.go:117] "RemoveContainer" containerID="5880170552fc383dd4b065a8d7328729e9a70eee657a91e8f42d0ac20d78abd9" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.683294 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.685603 5014 scope.go:117] "RemoveContainer" containerID="dca2fc7121ab0e756b20425edd88c2e47744b91c73aadc481d3f56f8fa58f61b" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.708042 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.737336 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.751940 5014 scope.go:117] "RemoveContainer" containerID="f43a46717ad64328fd39abfe0a782181090756536ec76cf8f401f628a6b5c485" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.760328 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.770329 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 11:11:12 crc kubenswrapper[5014]: E1205 11:11:12.770814 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="ceilometer-central-agent" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.770840 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="ceilometer-central-agent" Dec 05 11:11:12 crc kubenswrapper[5014]: E1205 11:11:12.770858 5014 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="ceilometer-notification-agent" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.770865 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="ceilometer-notification-agent" Dec 05 11:11:12 crc kubenswrapper[5014]: E1205 11:11:12.770875 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-api" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.770881 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-api" Dec 05 11:11:12 crc kubenswrapper[5014]: E1205 11:11:12.770900 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-log" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.770909 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-log" Dec 05 11:11:12 crc kubenswrapper[5014]: E1205 11:11:12.770937 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="proxy-httpd" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.770946 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="proxy-httpd" Dec 05 11:11:12 crc kubenswrapper[5014]: E1205 11:11:12.770976 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="sg-core" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.770984 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="sg-core" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.771238 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="sg-core" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.771256 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="ceilometer-notification-agent" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.771285 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-api" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.771311 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="proxy-httpd" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.771322 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" containerName="ceilometer-central-agent" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.771334 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" containerName="nova-api-log" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.772321 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.776190 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.777802 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.777859 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.777857 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.797050 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.800501 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.802732 5014 scope.go:117] "RemoveContainer" containerID="55fca510049101973f58fbd464884baa6b5c1c72c57984ac6341205af447cd84" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.806633 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.806825 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.807195 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.814367 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.829799 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-scripts\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.829853 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-public-tls-certs\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.829902 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-config-data\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.829924 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-config-data\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.829952 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l7hv\" (UniqueName: 
\"kubernetes.io/projected/040667ec-88a2-427c-bee5-78451b275439-kube-api-access-4l7hv\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.829991 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.830015 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d4ebd00-2f01-406e-9763-e4e58f33f09d-log-httpd\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.830034 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.830064 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/040667ec-88a2-427c-bee5-78451b275439-logs\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.830132 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kklc9\" (UniqueName: \"kubernetes.io/projected/4d4ebd00-2f01-406e-9763-e4e58f33f09d-kube-api-access-kklc9\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.830185 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-internal-tls-certs\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.830260 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.830325 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d4ebd00-2f01-406e-9763-e4e58f33f09d-run-httpd\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.830417 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " 
pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.867217 5014 scope.go:117] "RemoveContainer" containerID="175de7d209c08ec56ae4939a0d67945be6eb32231419d07912581dc44fabb52f" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932428 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-scripts\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932490 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-public-tls-certs\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932519 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-config-data\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932540 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-config-data\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932573 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l7hv\" (UniqueName: \"kubernetes.io/projected/040667ec-88a2-427c-bee5-78451b275439-kube-api-access-4l7hv\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932606 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932629 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d4ebd00-2f01-406e-9763-e4e58f33f09d-log-httpd\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932654 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932680 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/040667ec-88a2-427c-bee5-78451b275439-logs\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932755 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kklc9\" (UniqueName: 
\"kubernetes.io/projected/4d4ebd00-2f01-406e-9763-e4e58f33f09d-kube-api-access-kklc9\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932811 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-internal-tls-certs\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932890 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932927 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d4ebd00-2f01-406e-9763-e4e58f33f09d-run-httpd\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.932950 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.933575 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/040667ec-88a2-427c-bee5-78451b275439-logs\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.933974 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d4ebd00-2f01-406e-9763-e4e58f33f09d-log-httpd\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.934237 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d4ebd00-2f01-406e-9763-e4e58f33f09d-run-httpd\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.939815 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-scripts\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.939912 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.940144 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-config-data\") pod 
\"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.941452 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.941975 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.942934 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-internal-tls-certs\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.945929 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-config-data\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.948471 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d4ebd00-2f01-406e-9763-e4e58f33f09d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.948515 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-public-tls-certs\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.952049 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kklc9\" (UniqueName: \"kubernetes.io/projected/4d4ebd00-2f01-406e-9763-e4e58f33f09d-kube-api-access-kklc9\") pod \"ceilometer-0\" (UID: \"4d4ebd00-2f01-406e-9763-e4e58f33f09d\") " pod="openstack/ceilometer-0" Dec 05 11:11:12 crc kubenswrapper[5014]: I1205 11:11:12.962717 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l7hv\" (UniqueName: \"kubernetes.io/projected/040667ec-88a2-427c-bee5-78451b275439-kube-api-access-4l7hv\") pod \"nova-api-0\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") " pod="openstack/nova-api-0" Dec 05 11:11:13 crc kubenswrapper[5014]: I1205 11:11:13.108345 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:11:13 crc kubenswrapper[5014]: I1205 11:11:13.164119 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 11:11:13 crc kubenswrapper[5014]: I1205 11:11:13.331668 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cff6892-6841-4b91-bfb9-f015977f4023" path="/var/lib/kubelet/pods/1cff6892-6841-4b91-bfb9-f015977f4023/volumes" Dec 05 11:11:13 crc kubenswrapper[5014]: I1205 11:11:13.332995 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="743f670f-108f-446c-bee9-fbd36f5cf074" path="/var/lib/kubelet/pods/743f670f-108f-446c-bee9-fbd36f5cf074/volumes" Dec 05 11:11:13 crc kubenswrapper[5014]: I1205 11:11:13.594839 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:11:13 crc kubenswrapper[5014]: W1205 11:11:13.623841 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod040667ec_88a2_427c_bee5_78451b275439.slice/crio-a582f91dde968a8e56c3a8c030438a14f0de3cb617aca08785a64541712b233a WatchSource:0}: Error finding container a582f91dde968a8e56c3a8c030438a14f0de3cb617aca08785a64541712b233a: Status 404 returned error can't find the container with id a582f91dde968a8e56c3a8c030438a14f0de3cb617aca08785a64541712b233a Dec 05 11:11:13 crc kubenswrapper[5014]: I1205 11:11:13.713669 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 11:11:13 crc kubenswrapper[5014]: I1205 11:11:13.836775 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:11:13 crc kubenswrapper[5014]: I1205 11:11:13.854147 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.643434 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"040667ec-88a2-427c-bee5-78451b275439","Type":"ContainerStarted","Data":"e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979"} Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.646399 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"040667ec-88a2-427c-bee5-78451b275439","Type":"ContainerStarted","Data":"d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f"} Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.646983 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"040667ec-88a2-427c-bee5-78451b275439","Type":"ContainerStarted","Data":"a582f91dde968a8e56c3a8c030438a14f0de3cb617aca08785a64541712b233a"} Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.648880 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d4ebd00-2f01-406e-9763-e4e58f33f09d","Type":"ContainerStarted","Data":"c091c9f051f5bea974d87de127765cce3e0beb279d42496506ef6d68280d79e8"} Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.648928 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d4ebd00-2f01-406e-9763-e4e58f33f09d","Type":"ContainerStarted","Data":"46a5cda8a76771cabb4c95f8e787dd567607375488a86769e66c821838426913"} Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.666094 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.666071232 podStartE2EDuration="2.666071232s" podCreationTimestamp="2025-12-05 11:11:12 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:14.66312377 +0000 UTC m=+1401.611241484" watchObservedRunningTime="2025-12-05 11:11:14.666071232 +0000 UTC m=+1401.614188936" Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.679007 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.864259 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.884475 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-kvrd6"] Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.885660 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.889359 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.889309 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.920516 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-kvrd6"] Dec 05 11:11:14 crc kubenswrapper[5014]: I1205 11:11:14.956348 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.011918 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-scripts\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.012223 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jddxj\" (UniqueName: \"kubernetes.io/projected/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-kube-api-access-jddxj\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.012314 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.012365 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-config-data\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.049721 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-h9t5f"] Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.050017 5014 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" podUID="3e927441-85c4-4909-8112-66fe509ddb4d" containerName="dnsmasq-dns" containerID="cri-o://5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed" gracePeriod=10 Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.113973 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jddxj\" (UniqueName: \"kubernetes.io/projected/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-kube-api-access-jddxj\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.114410 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.114453 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-config-data\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.114514 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-scripts\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.120178 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-scripts\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.129814 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.132798 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-config-data\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.137442 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jddxj\" (UniqueName: \"kubernetes.io/projected/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-kube-api-access-jddxj\") pod \"nova-cell1-cell-mapping-kvrd6\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") " pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.217857 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-kvrd6" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.589684 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.636627 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-swift-storage-0\") pod \"3e927441-85c4-4909-8112-66fe509ddb4d\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.636687 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-svc\") pod \"3e927441-85c4-4909-8112-66fe509ddb4d\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.636727 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-nb\") pod \"3e927441-85c4-4909-8112-66fe509ddb4d\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.636849 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-config\") pod \"3e927441-85c4-4909-8112-66fe509ddb4d\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.636892 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-sb\") pod \"3e927441-85c4-4909-8112-66fe509ddb4d\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.636977 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8njlt\" (UniqueName: \"kubernetes.io/projected/3e927441-85c4-4909-8112-66fe509ddb4d-kube-api-access-8njlt\") pod \"3e927441-85c4-4909-8112-66fe509ddb4d\" (UID: \"3e927441-85c4-4909-8112-66fe509ddb4d\") " Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.657378 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e927441-85c4-4909-8112-66fe509ddb4d-kube-api-access-8njlt" (OuterVolumeSpecName: "kube-api-access-8njlt") pod "3e927441-85c4-4909-8112-66fe509ddb4d" (UID: "3e927441-85c4-4909-8112-66fe509ddb4d"). InnerVolumeSpecName "kube-api-access-8njlt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.677467 5014 generic.go:334] "Generic (PLEG): container finished" podID="3e927441-85c4-4909-8112-66fe509ddb4d" containerID="5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed" exitCode=0 Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.677689 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" event={"ID":"3e927441-85c4-4909-8112-66fe509ddb4d","Type":"ContainerDied","Data":"5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed"} Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.677788 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" event={"ID":"3e927441-85c4-4909-8112-66fe509ddb4d","Type":"ContainerDied","Data":"060334d6a96fb8cb73f2ed6408dbdc1e52b96f0b1ef902bf582f8a32954d6ba5"} Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.677871 5014 scope.go:117] "RemoveContainer" containerID="5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.678056 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.689677 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d4ebd00-2f01-406e-9763-e4e58f33f09d","Type":"ContainerStarted","Data":"52e5b3b7724969bfb2dbb3413749f834bfdfe5c0d1f8f7618d8b84bf56a4506e"} Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.718830 5014 scope.go:117] "RemoveContainer" containerID="42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.735330 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e927441-85c4-4909-8112-66fe509ddb4d" (UID: "3e927441-85c4-4909-8112-66fe509ddb4d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.739863 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.739902 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8njlt\" (UniqueName: \"kubernetes.io/projected/3e927441-85c4-4909-8112-66fe509ddb4d-kube-api-access-8njlt\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.750159 5014 scope.go:117] "RemoveContainer" containerID="5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed" Dec 05 11:11:15 crc kubenswrapper[5014]: E1205 11:11:15.750641 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed\": container with ID starting with 5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed not found: ID does not exist" containerID="5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.750671 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed"} err="failed to get container status \"5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed\": rpc error: code = NotFound desc = could not find container \"5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed\": container with ID starting with 5e081b359a9cf62de88afddb00ab45b823185ae473a98ecad987f076cdfc10ed not found: ID does not exist" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.750698 5014 scope.go:117] "RemoveContainer" containerID="42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44" Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.750779 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e927441-85c4-4909-8112-66fe509ddb4d" (UID: "3e927441-85c4-4909-8112-66fe509ddb4d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:11:15 crc kubenswrapper[5014]: E1205 11:11:15.751092 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44\": container with ID starting with 42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44 not found: ID does not exist" containerID="42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44"
Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.751115 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44"} err="failed to get container status \"42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44\": rpc error: code = NotFound desc = could not find container \"42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44\": container with ID starting with 42f779f5afb0a1f0c04c03d86529826a4899102d4480fecee2a74c85f42fcd44 not found: ID does not exist"
Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.766862 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3e927441-85c4-4909-8112-66fe509ddb4d" (UID: "3e927441-85c4-4909-8112-66fe509ddb4d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.779835 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3e927441-85c4-4909-8112-66fe509ddb4d" (UID: "3e927441-85c4-4909-8112-66fe509ddb4d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.783240 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-config" (OuterVolumeSpecName: "config") pod "3e927441-85c4-4909-8112-66fe509ddb4d" (UID: "3e927441-85c4-4909-8112-66fe509ddb4d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.837324 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-kvrd6"]
Dec 05 11:11:15 crc kubenswrapper[5014]: W1205 11:11:15.841463 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ead0c5f_aa2f_4d95_af5f_5e0ee24ecae5.slice/crio-fe27e0b56c605d4158d1f807f450e87c679e9e84cedc3c26f9f533dc56f68274 WatchSource:0}: Error finding container fe27e0b56c605d4158d1f807f450e87c679e9e84cedc3c26f9f533dc56f68274: Status 404 returned error can't find the container with id fe27e0b56c605d4158d1f807f450e87c679e9e84cedc3c26f9f533dc56f68274
Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.841884 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.841918 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.841934 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:15 crc kubenswrapper[5014]: I1205 11:11:15.841945 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e927441-85c4-4909-8112-66fe509ddb4d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:16 crc kubenswrapper[5014]: I1205 11:11:16.076960 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-h9t5f"]
Dec 05 11:11:16 crc kubenswrapper[5014]: I1205 11:11:16.085578 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-h9t5f"]
Dec 05 11:11:16 crc kubenswrapper[5014]: I1205 11:11:16.699634 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-kvrd6" event={"ID":"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5","Type":"ContainerStarted","Data":"fe27e0b56c605d4158d1f807f450e87c679e9e84cedc3c26f9f533dc56f68274"}
Dec 05 11:11:17 crc kubenswrapper[5014]: I1205 11:11:17.332446 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e927441-85c4-4909-8112-66fe509ddb4d" path="/var/lib/kubelet/pods/3e927441-85c4-4909-8112-66fe509ddb4d/volumes"
Dec 05 11:11:18 crc kubenswrapper[5014]: I1205 11:11:18.718186 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-kvrd6" event={"ID":"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5","Type":"ContainerStarted","Data":"367313f982c9132ff7236df5ff0db9fc657055bf09cfd30ee498f4926c7248b9"}
Dec 05 11:11:18 crc kubenswrapper[5014]: I1205 11:11:18.722187 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d4ebd00-2f01-406e-9763-e4e58f33f09d","Type":"ContainerStarted","Data":"74ef9338fa4b3261bef2346a211bea228b4456a41ffd9a8dd8272f6fb621ff88"}
Dec 05 11:11:18 crc kubenswrapper[5014]: I1205 11:11:18.744677 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-kvrd6" podStartSLOduration=4.744659841 podStartE2EDuration="4.744659841s" podCreationTimestamp="2025-12-05 11:11:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:18.734484323 +0000 UTC m=+1405.682602037" watchObservedRunningTime="2025-12-05 11:11:18.744659841 +0000 UTC m=+1405.692777545"
Dec 05 11:11:20 crc kubenswrapper[5014]: I1205 11:11:20.574787 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-865f5d856f-h9t5f" podUID="3e927441-85c4-4909-8112-66fe509ddb4d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.185:5353: i/o timeout"
Dec 05 11:11:21 crc kubenswrapper[5014]: I1205 11:11:21.752376 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d4ebd00-2f01-406e-9763-e4e58f33f09d","Type":"ContainerStarted","Data":"27ec20cdb1b349fb6bc3f5a7cbe4103fe406508e191cef75cf3d293b49c54b55"}
Dec 05 11:11:21 crc kubenswrapper[5014]: I1205 11:11:21.754083 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 11:11:22 crc kubenswrapper[5014]: I1205 11:11:22.769259 5014 generic.go:334] "Generic (PLEG): container finished" podID="7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5" containerID="367313f982c9132ff7236df5ff0db9fc657055bf09cfd30ee498f4926c7248b9" exitCode=0
Dec 05 11:11:22 crc kubenswrapper[5014]: I1205 11:11:22.770702 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-kvrd6" event={"ID":"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5","Type":"ContainerDied","Data":"367313f982c9132ff7236df5ff0db9fc657055bf09cfd30ee498f4926c7248b9"}
Dec 05 11:11:22 crc kubenswrapper[5014]: I1205 11:11:22.789873 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.438324433 podStartE2EDuration="10.789850025s" podCreationTimestamp="2025-12-05 11:11:12 +0000 UTC" firstStartedPulling="2025-12-05 11:11:13.726233711 +0000 UTC m=+1400.674351415" lastFinishedPulling="2025-12-05 11:11:21.077759303 +0000 UTC m=+1408.025877007" observedRunningTime="2025-12-05 11:11:21.776755276 +0000 UTC m=+1408.724872990" watchObservedRunningTime="2025-12-05 11:11:22.789850025 +0000 UTC m=+1409.737967739"
Dec 05 11:11:22 crc kubenswrapper[5014]: E1205 11:11:22.891331 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ead0c5f_aa2f_4d95_af5f_5e0ee24ecae5.slice/crio-367313f982c9132ff7236df5ff0db9fc657055bf09cfd30ee498f4926c7248b9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ead0c5f_aa2f_4d95_af5f_5e0ee24ecae5.slice/crio-conmon-367313f982c9132ff7236df5ff0db9fc657055bf09cfd30ee498f4926c7248b9.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 11:11:23 crc kubenswrapper[5014]: I1205 11:11:23.108955 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 11:11:23 crc kubenswrapper[5014]: I1205 11:11:23.109026 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.131495 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.197:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.132110 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.197:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.209539 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-kvrd6"
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.306165 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jddxj\" (UniqueName: \"kubernetes.io/projected/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-kube-api-access-jddxj\") pod \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") "
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.306524 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-combined-ca-bundle\") pod \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") "
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.306636 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-config-data\") pod \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") "
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.306758 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-scripts\") pod \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\" (UID: \"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5\") "
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.312479 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-scripts" (OuterVolumeSpecName: "scripts") pod "7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5" (UID: "7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.313247 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-kube-api-access-jddxj" (OuterVolumeSpecName: "kube-api-access-jddxj") pod "7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5" (UID: "7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5"). InnerVolumeSpecName "kube-api-access-jddxj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.339109 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5" (UID: "7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.346439 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-config-data" (OuterVolumeSpecName: "config-data") pod "7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5" (UID: "7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.409253 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.409309 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.409324 5014 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.409336 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jddxj\" (UniqueName: \"kubernetes.io/projected/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5-kube-api-access-jddxj\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.866625 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-kvrd6" event={"ID":"7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5","Type":"ContainerDied","Data":"fe27e0b56c605d4158d1f807f450e87c679e9e84cedc3c26f9f533dc56f68274"}
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.867304 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe27e0b56c605d4158d1f807f450e87c679e9e84cedc3c26f9f533dc56f68274"
Dec 05 11:11:24 crc kubenswrapper[5014]: I1205 11:11:24.888759 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-kvrd6"
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.100595 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.100862 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-log" containerID="cri-o://d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f" gracePeriod=30
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.101332 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-api" containerID="cri-o://e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979" gracePeriod=30
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.155973 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.156225 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-log" containerID="cri-o://c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32" gracePeriod=30
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.156691 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-metadata" containerID="cri-o://2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f" gracePeriod=30
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.168737 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.169006 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="604552d0-1f4f-4ae5-b515-d95071b12f38" containerName="nova-scheduler-scheduler" containerID="cri-o://561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249" gracePeriod=30
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.879383 5014 generic.go:334] "Generic (PLEG): container finished" podID="040667ec-88a2-427c-bee5-78451b275439" containerID="d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f" exitCode=143
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.879436 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"040667ec-88a2-427c-bee5-78451b275439","Type":"ContainerDied","Data":"d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f"}
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.882262 5014 generic.go:334] "Generic (PLEG): container finished" podID="30ec1d91-2613-41f0-92b4-2c195597789d" containerID="c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32" exitCode=143
Dec 05 11:11:25 crc kubenswrapper[5014]: I1205 11:11:25.882294 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"30ec1d91-2613-41f0-92b4-2c195597789d","Type":"ContainerDied","Data":"c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32"}
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.290214 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:44494->10.217.0.191:8775: read: connection reset by peer"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.290214 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:44492->10.217.0.191:8775: read: connection reset by peer"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.759169 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.848196 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-config-data\") pod \"30ec1d91-2613-41f0-92b4-2c195597789d\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") "
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.848359 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30ec1d91-2613-41f0-92b4-2c195597789d-logs\") pod \"30ec1d91-2613-41f0-92b4-2c195597789d\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") "
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.848539 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-nova-metadata-tls-certs\") pod \"30ec1d91-2613-41f0-92b4-2c195597789d\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") "
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.848614 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55mdw\" (UniqueName: \"kubernetes.io/projected/30ec1d91-2613-41f0-92b4-2c195597789d-kube-api-access-55mdw\") pod \"30ec1d91-2613-41f0-92b4-2c195597789d\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") "
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.848678 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-combined-ca-bundle\") pod \"30ec1d91-2613-41f0-92b4-2c195597789d\" (UID: \"30ec1d91-2613-41f0-92b4-2c195597789d\") "
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.849706 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30ec1d91-2613-41f0-92b4-2c195597789d-logs" (OuterVolumeSpecName: "logs") pod "30ec1d91-2613-41f0-92b4-2c195597789d" (UID: "30ec1d91-2613-41f0-92b4-2c195597789d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.860173 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30ec1d91-2613-41f0-92b4-2c195597789d-kube-api-access-55mdw" (OuterVolumeSpecName: "kube-api-access-55mdw") pod "30ec1d91-2613-41f0-92b4-2c195597789d" (UID: "30ec1d91-2613-41f0-92b4-2c195597789d"). InnerVolumeSpecName "kube-api-access-55mdw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.866916 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.885863 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-config-data" (OuterVolumeSpecName: "config-data") pod "30ec1d91-2613-41f0-92b4-2c195597789d" (UID: "30ec1d91-2613-41f0-92b4-2c195597789d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.922281 5014 generic.go:334] "Generic (PLEG): container finished" podID="30ec1d91-2613-41f0-92b4-2c195597789d" containerID="2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f" exitCode=0
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.922357 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.922402 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"30ec1d91-2613-41f0-92b4-2c195597789d","Type":"ContainerDied","Data":"2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f"}
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.922434 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"30ec1d91-2613-41f0-92b4-2c195597789d","Type":"ContainerDied","Data":"f74922d873c1ba080cadea53bd3e1a9928bc0169f47d5b3570769b79248e9ca4"}
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.922453 5014 scope.go:117] "RemoveContainer" containerID="2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.927542 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30ec1d91-2613-41f0-92b4-2c195597789d" (UID: "30ec1d91-2613-41f0-92b4-2c195597789d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.930030 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.929972 5014 generic.go:334] "Generic (PLEG): container finished" podID="604552d0-1f4f-4ae5-b515-d95071b12f38" containerID="561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249" exitCode=0
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.930224 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"604552d0-1f4f-4ae5-b515-d95071b12f38","Type":"ContainerDied","Data":"561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249"}
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.931110 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"604552d0-1f4f-4ae5-b515-d95071b12f38","Type":"ContainerDied","Data":"035154dbe6fa9ccd9ae3180dd89519edaabad1ff455bf7c8aa9659ffeb1fa4c7"}
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.939837 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "30ec1d91-2613-41f0-92b4-2c195597789d" (UID: "30ec1d91-2613-41f0-92b4-2c195597789d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.950562 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-config-data\") pod \"604552d0-1f4f-4ae5-b515-d95071b12f38\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") "
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.950879 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tj8z\" (UniqueName: \"kubernetes.io/projected/604552d0-1f4f-4ae5-b515-d95071b12f38-kube-api-access-7tj8z\") pod \"604552d0-1f4f-4ae5-b515-d95071b12f38\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") "
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.950973 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-combined-ca-bundle\") pod \"604552d0-1f4f-4ae5-b515-d95071b12f38\" (UID: \"604552d0-1f4f-4ae5-b515-d95071b12f38\") "
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.951441 5014 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.951455 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55mdw\" (UniqueName: \"kubernetes.io/projected/30ec1d91-2613-41f0-92b4-2c195597789d-kube-api-access-55mdw\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.951466 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.951478 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ec1d91-2613-41f0-92b4-2c195597789d-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.951491 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30ec1d91-2613-41f0-92b4-2c195597789d-logs\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.955795 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/604552d0-1f4f-4ae5-b515-d95071b12f38-kube-api-access-7tj8z" (OuterVolumeSpecName: "kube-api-access-7tj8z") pod "604552d0-1f4f-4ae5-b515-d95071b12f38" (UID: "604552d0-1f4f-4ae5-b515-d95071b12f38"). InnerVolumeSpecName "kube-api-access-7tj8z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.963488 5014 scope.go:117] "RemoveContainer" containerID="c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.980824 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-config-data" (OuterVolumeSpecName: "config-data") pod "604552d0-1f4f-4ae5-b515-d95071b12f38" (UID: "604552d0-1f4f-4ae5-b515-d95071b12f38"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.986769 5014 scope.go:117] "RemoveContainer" containerID="2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f"
Dec 05 11:11:28 crc kubenswrapper[5014]: E1205 11:11:28.987661 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f\": container with ID starting with 2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f not found: ID does not exist" containerID="2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.987694 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f"} err="failed to get container status \"2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f\": rpc error: code = NotFound desc = could not find container \"2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f\": container with ID starting with 2127e3f1cfcef708593e0e3057c9a946bfa2e81a418ee95a3e53502f1eaafe4f not found: ID does not exist"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.987724 5014 scope.go:117] "RemoveContainer" containerID="c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32"
Dec 05 11:11:28 crc kubenswrapper[5014]: E1205 11:11:28.988050 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32\": container with ID starting with c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32 not found: ID does not exist" containerID="c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.988073 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32"} err="failed to get container status \"c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32\": rpc error: code = NotFound desc = could not find container \"c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32\": container with ID starting with c4a064fc487df154c95d67bf8fc8665899ab3eaf82f82ad482f09ea003341d32 not found: ID does not exist"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.988086 5014 scope.go:117] "RemoveContainer" containerID="561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249"
Dec 05 11:11:28 crc kubenswrapper[5014]: I1205 11:11:28.989852 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "604552d0-1f4f-4ae5-b515-d95071b12f38" (UID: "604552d0-1f4f-4ae5-b515-d95071b12f38"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.007884 5014 scope.go:117] "RemoveContainer" containerID="561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249"
Dec 05 11:11:29 crc kubenswrapper[5014]: E1205 11:11:29.008525 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249\": container with ID starting with 561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249 not found: ID does not exist" containerID="561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.008557 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249"} err="failed to get container status \"561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249\": rpc error: code = NotFound desc = could not find container \"561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249\": container with ID starting with 561b549ccfc9954603acbc03b06f417005467d9fac5d965d8b590bedf5114249 not found: ID does not exist"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.053492 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.053813 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tj8z\" (UniqueName: \"kubernetes.io/projected/604552d0-1f4f-4ae5-b515-d95071b12f38-kube-api-access-7tj8z\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.053825 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604552d0-1f4f-4ae5-b515-d95071b12f38-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.291410 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.300928 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.312649 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.335576 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" path="/var/lib/kubelet/pods/30ec1d91-2613-41f0-92b4-2c195597789d/volumes"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.336207 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.338027 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 11:11:29 crc kubenswrapper[5014]: E1205 11:11:29.339711 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5" containerName="nova-manage"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.339741 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5" containerName="nova-manage"
Dec 05 11:11:29 crc kubenswrapper[5014]: E1205 11:11:29.339760 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e927441-85c4-4909-8112-66fe509ddb4d" containerName="init"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.339767 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e927441-85c4-4909-8112-66fe509ddb4d" containerName="init"
Dec 05 11:11:29 crc kubenswrapper[5014]: E1205 11:11:29.339783 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-metadata"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.339791 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-metadata"
Dec 05 11:11:29 crc kubenswrapper[5014]: E1205 11:11:29.339814 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-log"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.339820 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-log"
Dec 05 11:11:29 crc kubenswrapper[5014]: E1205 11:11:29.339829 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e927441-85c4-4909-8112-66fe509ddb4d" containerName="dnsmasq-dns"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.339835 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e927441-85c4-4909-8112-66fe509ddb4d" containerName="dnsmasq-dns"
Dec 05 11:11:29 crc kubenswrapper[5014]: E1205 11:11:29.339846 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="604552d0-1f4f-4ae5-b515-d95071b12f38" containerName="nova-scheduler-scheduler"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.339852 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="604552d0-1f4f-4ae5-b515-d95071b12f38" containerName="nova-scheduler-scheduler"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.340070 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-metadata"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.340090 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ec1d91-2613-41f0-92b4-2c195597789d" containerName="nova-metadata-log"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.340109 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="604552d0-1f4f-4ae5-b515-d95071b12f38" containerName="nova-scheduler-scheduler"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.340133 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e927441-85c4-4909-8112-66fe509ddb4d" containerName="dnsmasq-dns"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.340145 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5" containerName="nova-manage"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.341421 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.343763 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.343776 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.353553 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.365683 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.366954 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.368944 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.383173 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.466101 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8ls7z"]
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.468110 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.475352 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0e65cd2-d320-4d94-8ea2-034e56ba5880-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.475415 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmdh7\" (UniqueName: \"kubernetes.io/projected/c0e65cd2-d320-4d94-8ea2-034e56ba5880-kube-api-access-vmdh7\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.475445 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1e7313-1a79-42e6-b286-0046ddd16e69-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4b1e7313-1a79-42e6-b286-0046ddd16e69\") " pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.475483 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0e65cd2-d320-4d94-8ea2-034e56ba5880-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.475556 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0e65cd2-d320-4d94-8ea2-034e56ba5880-config-data\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.475586 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2mfg\" (UniqueName: \"kubernetes.io/projected/4b1e7313-1a79-42e6-b286-0046ddd16e69-kube-api-access-h2mfg\") pod \"nova-scheduler-0\" (UID: \"4b1e7313-1a79-42e6-b286-0046ddd16e69\") " pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.475611 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1e7313-1a79-42e6-b286-0046ddd16e69-config-data\") pod \"nova-scheduler-0\" (UID: \"4b1e7313-1a79-42e6-b286-0046ddd16e69\") " pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.475624 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0e65cd2-d320-4d94-8ea2-034e56ba5880-logs\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.483892 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8ls7z"]
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.577940 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0e65cd2-d320-4d94-8ea2-034e56ba5880-config-data\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578016 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2mfg\" (UniqueName: \"kubernetes.io/projected/4b1e7313-1a79-42e6-b286-0046ddd16e69-kube-api-access-h2mfg\") pod \"nova-scheduler-0\" (UID: \"4b1e7313-1a79-42e6-b286-0046ddd16e69\") " pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578054 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1e7313-1a79-42e6-b286-0046ddd16e69-config-data\") pod \"nova-scheduler-0\" (UID: \"4b1e7313-1a79-42e6-b286-0046ddd16e69\") " pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578073 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0e65cd2-d320-4d94-8ea2-034e56ba5880-logs\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578132 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz6wf\" (UniqueName: \"kubernetes.io/projected/906af090-57c1-4115-aa37-83ecc098261b-kube-api-access-nz6wf\") pod \"redhat-operators-8ls7z\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578164 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-utilities\") pod \"redhat-operators-8ls7z\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578197 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-catalog-content\") pod \"redhat-operators-8ls7z\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578262 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0e65cd2-d320-4d94-8ea2-034e56ba5880-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578360 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmdh7\" (UniqueName: \"kubernetes.io/projected/c0e65cd2-d320-4d94-8ea2-034e56ba5880-kube-api-access-vmdh7\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578386 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1e7313-1a79-42e6-b286-0046ddd16e69-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4b1e7313-1a79-42e6-b286-0046ddd16e69\") " pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.578436 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0e65cd2-d320-4d94-8ea2-034e56ba5880-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.580236 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0e65cd2-d320-4d94-8ea2-034e56ba5880-logs\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.583478 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0e65cd2-d320-4d94-8ea2-034e56ba5880-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.583557 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0e65cd2-d320-4d94-8ea2-034e56ba5880-config-data\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.585012 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1e7313-1a79-42e6-b286-0046ddd16e69-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4b1e7313-1a79-42e6-b286-0046ddd16e69\") " pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.585932 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1e7313-1a79-42e6-b286-0046ddd16e69-config-data\") pod \"nova-scheduler-0\" (UID: \"4b1e7313-1a79-42e6-b286-0046ddd16e69\") " pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.599432 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmdh7\" (UniqueName: \"kubernetes.io/projected/c0e65cd2-d320-4d94-8ea2-034e56ba5880-kube-api-access-vmdh7\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.602166 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2mfg\" (UniqueName: \"kubernetes.io/projected/4b1e7313-1a79-42e6-b286-0046ddd16e69-kube-api-access-h2mfg\") pod \"nova-scheduler-0\" (UID: \"4b1e7313-1a79-42e6-b286-0046ddd16e69\") " pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.602549 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0e65cd2-d320-4d94-8ea2-034e56ba5880-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c0e65cd2-d320-4d94-8ea2-034e56ba5880\") " pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.664734 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.680940 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz6wf\" (UniqueName: \"kubernetes.io/projected/906af090-57c1-4115-aa37-83ecc098261b-kube-api-access-nz6wf\") pod \"redhat-operators-8ls7z\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.680990 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-utilities\") pod \"redhat-operators-8ls7z\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.681019 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-catalog-content\") pod \"redhat-operators-8ls7z\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.682832 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-catalog-content\") pod \"redhat-operators-8ls7z\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.682980 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-utilities\") pod \"redhat-operators-8ls7z\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.712885 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz6wf\" (UniqueName: \"kubernetes.io/projected/906af090-57c1-4115-aa37-83ecc098261b-kube-api-access-nz6wf\") pod \"redhat-operators-8ls7z\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.738816 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.799156 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8ls7z"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.915337 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.975589 5014 generic.go:334] "Generic (PLEG): container finished" podID="040667ec-88a2-427c-bee5-78451b275439" containerID="e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979" exitCode=0
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.976341 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.976574 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"040667ec-88a2-427c-bee5-78451b275439","Type":"ContainerDied","Data":"e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979"}
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.976640 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"040667ec-88a2-427c-bee5-78451b275439","Type":"ContainerDied","Data":"a582f91dde968a8e56c3a8c030438a14f0de3cb617aca08785a64541712b233a"}
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.976664 5014 scope.go:117] "RemoveContainer" containerID="e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979"
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.987409 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-combined-ca-bundle\") pod \"040667ec-88a2-427c-bee5-78451b275439\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") "
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.987504 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/040667ec-88a2-427c-bee5-78451b275439-logs\") pod \"040667ec-88a2-427c-bee5-78451b275439\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") "
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.987744 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l7hv\" (UniqueName: \"kubernetes.io/projected/040667ec-88a2-427c-bee5-78451b275439-kube-api-access-4l7hv\") pod \"040667ec-88a2-427c-bee5-78451b275439\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") "
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.987798 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-config-data\") pod \"040667ec-88a2-427c-bee5-78451b275439\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") "
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.987897 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-internal-tls-certs\") pod \"040667ec-88a2-427c-bee5-78451b275439\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") "
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.987924 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-public-tls-certs\") pod \"040667ec-88a2-427c-bee5-78451b275439\" (UID: \"040667ec-88a2-427c-bee5-78451b275439\") "
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.988453 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/040667ec-88a2-427c-bee5-78451b275439-logs" (OuterVolumeSpecName: "logs") pod "040667ec-88a2-427c-bee5-78451b275439" (UID: "040667ec-88a2-427c-bee5-78451b275439"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.988779 5014 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/040667ec-88a2-427c-bee5-78451b275439-logs\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:29 crc kubenswrapper[5014]: I1205 11:11:29.997486 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/040667ec-88a2-427c-bee5-78451b275439-kube-api-access-4l7hv" (OuterVolumeSpecName: "kube-api-access-4l7hv") pod "040667ec-88a2-427c-bee5-78451b275439" (UID: "040667ec-88a2-427c-bee5-78451b275439"). InnerVolumeSpecName "kube-api-access-4l7hv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.020367 5014 scope.go:117] "RemoveContainer" containerID="d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.032598 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-config-data" (OuterVolumeSpecName: "config-data") pod "040667ec-88a2-427c-bee5-78451b275439" (UID: "040667ec-88a2-427c-bee5-78451b275439"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.049318 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "040667ec-88a2-427c-bee5-78451b275439" (UID: "040667ec-88a2-427c-bee5-78451b275439"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.058004 5014 scope.go:117] "RemoveContainer" containerID="e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979"
Dec 05 11:11:30 crc kubenswrapper[5014]: E1205 11:11:30.060028 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979\": container with ID starting with e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979 not found: ID does not exist" containerID="e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.060091 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979"} err="failed to get container status \"e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979\": rpc error: code = NotFound desc = could not find container \"e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979\": container with ID starting with e3e25f28097f0bf4fab10dd95139c163589691547427b39650f4d072581e4979 not found: ID does not exist"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.060126 5014 scope.go:117] "RemoveContainer" containerID="d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f"
Dec 05 11:11:30 crc kubenswrapper[5014]: E1205 11:11:30.060635 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f\": container with ID starting with d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f not found: ID does not exist" containerID="d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.060714 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f"} err="failed to get container status \"d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f\": rpc error: code = NotFound desc = could not find container \"d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f\": container with ID starting with d27cfdf1d109a6049f859cf21741154b7f6b3eeb62c1e8ccbd78f7d322c2bc7f not found: ID does not exist"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.093801 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.095531 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l7hv\" (UniqueName: \"kubernetes.io/projected/040667ec-88a2-427c-bee5-78451b275439-kube-api-access-4l7hv\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.095677 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.096185 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "040667ec-88a2-427c-bee5-78451b275439" (UID: "040667ec-88a2-427c-bee5-78451b275439"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.099805 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "040667ec-88a2-427c-bee5-78451b275439" (UID: "040667ec-88a2-427c-bee5-78451b275439"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.198052 5014 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.198078 5014 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/040667ec-88a2-427c-bee5-78451b275439-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.322893 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:11:30 crc kubenswrapper[5014]: W1205 11:11:30.331851 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0e65cd2_d320_4d94_8ea2_034e56ba5880.slice/crio-438dea50de331df3e4d452c6e9ca0ba54927fee28d9a0bd72253765779ff81d4 WatchSource:0}: Error finding container 438dea50de331df3e4d452c6e9ca0ba54927fee28d9a0bd72253765779ff81d4: Status 404 returned error can't find the container with id 438dea50de331df3e4d452c6e9ca0ba54927fee28d9a0bd72253765779ff81d4
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.342430 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.356336 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.364498 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:11:30 crc kubenswrapper[5014]: E1205 11:11:30.365103 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-api"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.365130 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-api"
Dec 05 11:11:30 crc kubenswrapper[5014]: E1205 11:11:30.365149 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-log"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.365158 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-log"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.365413 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-log"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.365440 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="040667ec-88a2-427c-bee5-78451b275439" containerName="nova-api-api"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.366883 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.370247 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.370745 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.370967 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.375122 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.386948 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.401784 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-927lc\" (UniqueName: \"kubernetes.io/projected/512551f9-cbaf-4245-9c35-68a0d6adc709-kube-api-access-927lc\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.401886 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-config-data\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.401975 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-public-tls-certs\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.404245 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-internal-tls-certs\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.404383 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/512551f9-cbaf-4245-9c35-68a0d6adc709-logs\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.404432 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: W1205 11:11:30.443540 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod906af090_57c1_4115_aa37_83ecc098261b.slice/crio-801583c22b207f994f65a8bcdd9b4d00f0b0008a4ce921e625e192c7635fa827 WatchSource:0}: Error finding container 801583c22b207f994f65a8bcdd9b4d00f0b0008a4ce921e625e192c7635fa827: Status 404 returned error can't find the container with id 801583c22b207f994f65a8bcdd9b4d00f0b0008a4ce921e625e192c7635fa827
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.444547 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8ls7z"]
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.506744 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-config-data\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.507050 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-public-tls-certs\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.507174 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-internal-tls-certs\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.507218 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/512551f9-cbaf-4245-9c35-68a0d6adc709-logs\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.507453 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.507580 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-927lc\" (UniqueName: \"kubernetes.io/projected/512551f9-cbaf-4245-9c35-68a0d6adc709-kube-api-access-927lc\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.507767 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/512551f9-cbaf-4245-9c35-68a0d6adc709-logs\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.511663 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-internal-tls-certs\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0"
Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.512117 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName:
\"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0" Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.513481 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-public-tls-certs\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0" Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.513634 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/512551f9-cbaf-4245-9c35-68a0d6adc709-config-data\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0" Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.527643 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-927lc\" (UniqueName: \"kubernetes.io/projected/512551f9-cbaf-4245-9c35-68a0d6adc709-kube-api-access-927lc\") pod \"nova-api-0\" (UID: \"512551f9-cbaf-4245-9c35-68a0d6adc709\") " pod="openstack/nova-api-0" Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.629533 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.997502 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b1e7313-1a79-42e6-b286-0046ddd16e69","Type":"ContainerStarted","Data":"c39ec3b44b7e36e0500b12545608485ace841dd190b76970e04a2ba6a1afe57e"} Dec 05 11:11:30 crc kubenswrapper[5014]: I1205 11:11:30.997815 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b1e7313-1a79-42e6-b286-0046ddd16e69","Type":"ContainerStarted","Data":"62987ce5e41f18c0cfa701fcf01aec0b21eeed6c6e0b4d847bd140414e122d46"} Dec 05 11:11:31 crc kubenswrapper[5014]: I1205 11:11:31.001608 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c0e65cd2-d320-4d94-8ea2-034e56ba5880","Type":"ContainerStarted","Data":"822539b20e4c7e4f26c180184cd84924388b9cf6ebfed8cb4fb475c1ea9699e4"} Dec 05 11:11:31 crc kubenswrapper[5014]: I1205 11:11:31.001665 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c0e65cd2-d320-4d94-8ea2-034e56ba5880","Type":"ContainerStarted","Data":"438dea50de331df3e4d452c6e9ca0ba54927fee28d9a0bd72253765779ff81d4"} Dec 05 11:11:31 crc kubenswrapper[5014]: I1205 11:11:31.004224 5014 generic.go:334] "Generic (PLEG): container finished" podID="906af090-57c1-4115-aa37-83ecc098261b" containerID="dd381cb4fce22eacd5a45500bed46b9eca9f1f4a3d6829d399da1bce225e1cde" exitCode=0 Dec 05 11:11:31 crc kubenswrapper[5014]: I1205 11:11:31.004347 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ls7z" event={"ID":"906af090-57c1-4115-aa37-83ecc098261b","Type":"ContainerDied","Data":"dd381cb4fce22eacd5a45500bed46b9eca9f1f4a3d6829d399da1bce225e1cde"} Dec 05 11:11:31 crc kubenswrapper[5014]: I1205 11:11:31.004418 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ls7z" event={"ID":"906af090-57c1-4115-aa37-83ecc098261b","Type":"ContainerStarted","Data":"801583c22b207f994f65a8bcdd9b4d00f0b0008a4ce921e625e192c7635fa827"} Dec 05 11:11:31 crc kubenswrapper[5014]: I1205 11:11:31.041285 
5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.041242604 podStartE2EDuration="2.041242604s" podCreationTimestamp="2025-12-05 11:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:31.014213164 +0000 UTC m=+1417.962330878" watchObservedRunningTime="2025-12-05 11:11:31.041242604 +0000 UTC m=+1417.989360308" Dec 05 11:11:31 crc kubenswrapper[5014]: W1205 11:11:31.114156 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod512551f9_cbaf_4245_9c35_68a0d6adc709.slice/crio-eaa22245d999b808d9b6f9db2dbbadd05f9a2f12d999da267c37cb1028e3d68e WatchSource:0}: Error finding container eaa22245d999b808d9b6f9db2dbbadd05f9a2f12d999da267c37cb1028e3d68e: Status 404 returned error can't find the container with id eaa22245d999b808d9b6f9db2dbbadd05f9a2f12d999da267c37cb1028e3d68e Dec 05 11:11:31 crc kubenswrapper[5014]: I1205 11:11:31.120344 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 11:11:31 crc kubenswrapper[5014]: I1205 11:11:31.342246 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="040667ec-88a2-427c-bee5-78451b275439" path="/var/lib/kubelet/pods/040667ec-88a2-427c-bee5-78451b275439/volumes" Dec 05 11:11:31 crc kubenswrapper[5014]: I1205 11:11:31.342837 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="604552d0-1f4f-4ae5-b515-d95071b12f38" path="/var/lib/kubelet/pods/604552d0-1f4f-4ae5-b515-d95071b12f38/volumes" Dec 05 11:11:32 crc kubenswrapper[5014]: I1205 11:11:32.013758 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ls7z" event={"ID":"906af090-57c1-4115-aa37-83ecc098261b","Type":"ContainerStarted","Data":"769a083fdfe79dfeca4b72191896a17dc15e37c1ee7c663e5faa0298f710ac9a"} Dec 05 11:11:32 crc kubenswrapper[5014]: I1205 11:11:32.017804 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c0e65cd2-d320-4d94-8ea2-034e56ba5880","Type":"ContainerStarted","Data":"ff34a28e80c0f44e95f9f28b47101633929e4d3eddac2484ab59317a45526d94"} Dec 05 11:11:32 crc kubenswrapper[5014]: I1205 11:11:32.020622 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"512551f9-cbaf-4245-9c35-68a0d6adc709","Type":"ContainerStarted","Data":"4b29bc94ae5e6836ad42e75ec46f8eaff5a2f9b14d737664e8a54152376bc88b"} Dec 05 11:11:32 crc kubenswrapper[5014]: I1205 11:11:32.020653 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"512551f9-cbaf-4245-9c35-68a0d6adc709","Type":"ContainerStarted","Data":"7b354c60b42724dc34e6c3208694073128c012b7d52d43e2f4b87b4b9fdc56b6"} Dec 05 11:11:32 crc kubenswrapper[5014]: I1205 11:11:32.020664 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"512551f9-cbaf-4245-9c35-68a0d6adc709","Type":"ContainerStarted","Data":"eaa22245d999b808d9b6f9db2dbbadd05f9a2f12d999da267c37cb1028e3d68e"} Dec 05 11:11:32 crc kubenswrapper[5014]: I1205 11:11:32.056781 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.056762293 podStartE2EDuration="3.056762293s" podCreationTimestamp="2025-12-05 11:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:32.053642117 +0000 UTC m=+1419.001759821" watchObservedRunningTime="2025-12-05 11:11:32.056762293 +0000 UTC m=+1419.004879997" Dec 05 11:11:32 crc kubenswrapper[5014]: I1205 11:11:32.075741 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.075718536 podStartE2EDuration="2.075718536s" podCreationTimestamp="2025-12-05 11:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:11:32.072394964 +0000 UTC m=+1419.020512678" watchObservedRunningTime="2025-12-05 11:11:32.075718536 +0000 UTC m=+1419.023836250" Dec 05 11:11:32 crc kubenswrapper[5014]: I1205 11:11:32.936209 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:11:32 crc kubenswrapper[5014]: I1205 11:11:32.936546 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:11:33 crc kubenswrapper[5014]: I1205 11:11:33.030692 5014 generic.go:334] "Generic (PLEG): container finished" podID="906af090-57c1-4115-aa37-83ecc098261b" containerID="769a083fdfe79dfeca4b72191896a17dc15e37c1ee7c663e5faa0298f710ac9a" exitCode=0 Dec 05 11:11:33 crc kubenswrapper[5014]: I1205 11:11:33.032039 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ls7z" event={"ID":"906af090-57c1-4115-aa37-83ecc098261b","Type":"ContainerDied","Data":"769a083fdfe79dfeca4b72191896a17dc15e37c1ee7c663e5faa0298f710ac9a"} Dec 05 11:11:33 crc kubenswrapper[5014]: I1205 11:11:33.441728 5014 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podc1290dfc-17fd-46a1-8f34-d1e338523945"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podc1290dfc-17fd-46a1-8f34-d1e338523945] : Timed out while waiting for systemd to remove kubepods-besteffort-podc1290dfc_17fd_46a1_8f34_d1e338523945.slice" Dec 05 11:11:34 crc kubenswrapper[5014]: I1205 11:11:34.665586 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 11:11:34 crc kubenswrapper[5014]: I1205 11:11:34.665953 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 11:11:34 crc kubenswrapper[5014]: I1205 11:11:34.739591 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 11:11:35 crc kubenswrapper[5014]: I1205 11:11:35.052322 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ls7z" event={"ID":"906af090-57c1-4115-aa37-83ecc098261b","Type":"ContainerStarted","Data":"4bd97d22171d528969df57f4590d4f93b05b4918212c98b396263968815f4bc4"} Dec 05 11:11:35 crc kubenswrapper[5014]: I1205 11:11:35.076978 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8ls7z" 
podStartSLOduration=2.937125552 podStartE2EDuration="6.076955606s" podCreationTimestamp="2025-12-05 11:11:29 +0000 UTC" firstStartedPulling="2025-12-05 11:11:31.01283309 +0000 UTC m=+1417.960950794" lastFinishedPulling="2025-12-05 11:11:34.152663144 +0000 UTC m=+1421.100780848" observedRunningTime="2025-12-05 11:11:35.069036073 +0000 UTC m=+1422.017153787" watchObservedRunningTime="2025-12-05 11:11:35.076955606 +0000 UTC m=+1422.025073320" Dec 05 11:11:39 crc kubenswrapper[5014]: I1205 11:11:39.666138 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 11:11:39 crc kubenswrapper[5014]: I1205 11:11:39.666733 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 11:11:39 crc kubenswrapper[5014]: I1205 11:11:39.739756 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 11:11:39 crc kubenswrapper[5014]: I1205 11:11:39.776871 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 11:11:39 crc kubenswrapper[5014]: I1205 11:11:39.799334 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8ls7z" Dec 05 11:11:39 crc kubenswrapper[5014]: I1205 11:11:39.800067 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8ls7z" Dec 05 11:11:39 crc kubenswrapper[5014]: I1205 11:11:39.857189 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8ls7z" Dec 05 11:11:40 crc kubenswrapper[5014]: I1205 11:11:40.137386 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 11:11:40 crc kubenswrapper[5014]: I1205 11:11:40.152657 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8ls7z" Dec 05 11:11:40 crc kubenswrapper[5014]: I1205 11:11:40.206932 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8ls7z"] Dec 05 11:11:40 crc kubenswrapper[5014]: I1205 11:11:40.630801 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 11:11:40 crc kubenswrapper[5014]: I1205 11:11:40.632345 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 11:11:40 crc kubenswrapper[5014]: I1205 11:11:40.686650 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c0e65cd2-d320-4d94-8ea2-034e56ba5880" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 11:11:40 crc kubenswrapper[5014]: I1205 11:11:40.686980 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c0e65cd2-d320-4d94-8ea2-034e56ba5880" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 11:11:41 crc kubenswrapper[5014]: I1205 11:11:41.641417 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="512551f9-cbaf-4245-9c35-68a0d6adc709" containerName="nova-api-api" probeResult="failure" output="Get 
\"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 11:11:41 crc kubenswrapper[5014]: I1205 11:11:41.641491 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="512551f9-cbaf-4245-9c35-68a0d6adc709" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 11:11:42 crc kubenswrapper[5014]: I1205 11:11:42.128298 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8ls7z" podUID="906af090-57c1-4115-aa37-83ecc098261b" containerName="registry-server" containerID="cri-o://4bd97d22171d528969df57f4590d4f93b05b4918212c98b396263968815f4bc4" gracePeriod=2 Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.142411 5014 generic.go:334] "Generic (PLEG): container finished" podID="906af090-57c1-4115-aa37-83ecc098261b" containerID="4bd97d22171d528969df57f4590d4f93b05b4918212c98b396263968815f4bc4" exitCode=0 Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.142455 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ls7z" event={"ID":"906af090-57c1-4115-aa37-83ecc098261b","Type":"ContainerDied","Data":"4bd97d22171d528969df57f4590d4f93b05b4918212c98b396263968815f4bc4"} Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.182576 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.272063 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8ls7z" Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.363987 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-utilities\") pod \"906af090-57c1-4115-aa37-83ecc098261b\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.364602 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-catalog-content\") pod \"906af090-57c1-4115-aa37-83ecc098261b\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.364765 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz6wf\" (UniqueName: \"kubernetes.io/projected/906af090-57c1-4115-aa37-83ecc098261b-kube-api-access-nz6wf\") pod \"906af090-57c1-4115-aa37-83ecc098261b\" (UID: \"906af090-57c1-4115-aa37-83ecc098261b\") " Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.367693 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-utilities" (OuterVolumeSpecName: "utilities") pod "906af090-57c1-4115-aa37-83ecc098261b" (UID: "906af090-57c1-4115-aa37-83ecc098261b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.373709 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/906af090-57c1-4115-aa37-83ecc098261b-kube-api-access-nz6wf" (OuterVolumeSpecName: "kube-api-access-nz6wf") pod "906af090-57c1-4115-aa37-83ecc098261b" (UID: "906af090-57c1-4115-aa37-83ecc098261b"). InnerVolumeSpecName "kube-api-access-nz6wf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.467945 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz6wf\" (UniqueName: \"kubernetes.io/projected/906af090-57c1-4115-aa37-83ecc098261b-kube-api-access-nz6wf\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.467984 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.478233 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "906af090-57c1-4115-aa37-83ecc098261b" (UID: "906af090-57c1-4115-aa37-83ecc098261b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:11:43 crc kubenswrapper[5014]: I1205 11:11:43.569837 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/906af090-57c1-4115-aa37-83ecc098261b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:11:44 crc kubenswrapper[5014]: I1205 11:11:44.155214 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ls7z" event={"ID":"906af090-57c1-4115-aa37-83ecc098261b","Type":"ContainerDied","Data":"801583c22b207f994f65a8bcdd9b4d00f0b0008a4ce921e625e192c7635fa827"} Dec 05 11:11:44 crc kubenswrapper[5014]: I1205 11:11:44.155993 5014 scope.go:117] "RemoveContainer" containerID="4bd97d22171d528969df57f4590d4f93b05b4918212c98b396263968815f4bc4" Dec 05 11:11:44 crc kubenswrapper[5014]: I1205 11:11:44.155343 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8ls7z" Dec 05 11:11:44 crc kubenswrapper[5014]: I1205 11:11:44.180018 5014 scope.go:117] "RemoveContainer" containerID="769a083fdfe79dfeca4b72191896a17dc15e37c1ee7c663e5faa0298f710ac9a" Dec 05 11:11:44 crc kubenswrapper[5014]: I1205 11:11:44.196903 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8ls7z"] Dec 05 11:11:44 crc kubenswrapper[5014]: I1205 11:11:44.206537 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8ls7z"] Dec 05 11:11:44 crc kubenswrapper[5014]: I1205 11:11:44.208657 5014 scope.go:117] "RemoveContainer" containerID="dd381cb4fce22eacd5a45500bed46b9eca9f1f4a3d6829d399da1bce225e1cde" Dec 05 11:11:45 crc kubenswrapper[5014]: I1205 11:11:45.329559 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="906af090-57c1-4115-aa37-83ecc098261b" path="/var/lib/kubelet/pods/906af090-57c1-4115-aa37-83ecc098261b/volumes" Dec 05 11:11:49 crc kubenswrapper[5014]: I1205 11:11:49.670908 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 11:11:49 crc kubenswrapper[5014]: I1205 11:11:49.671471 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 11:11:49 crc kubenswrapper[5014]: I1205 11:11:49.675668 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 11:11:49 crc kubenswrapper[5014]: I1205 11:11:49.678657 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 11:11:50 crc kubenswrapper[5014]: I1205 11:11:50.637209 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 11:11:50 crc kubenswrapper[5014]: I1205 11:11:50.637855 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 11:11:50 crc kubenswrapper[5014]: I1205 11:11:50.637896 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 11:11:50 crc kubenswrapper[5014]: I1205 11:11:50.643088 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 11:11:51 crc kubenswrapper[5014]: I1205 11:11:51.219581 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 11:11:51 crc kubenswrapper[5014]: I1205 11:11:51.226849 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 11:12:00 crc kubenswrapper[5014]: I1205 11:12:00.808118 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 11:12:01 crc kubenswrapper[5014]: I1205 11:12:01.817395 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:12:02 crc kubenswrapper[5014]: I1205 11:12:02.937202 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:12:02 crc kubenswrapper[5014]: I1205 11:12:02.937299 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" 
podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:12:04 crc kubenswrapper[5014]: I1205 11:12:04.957997 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerName="rabbitmq" containerID="cri-o://a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617" gracePeriod=604796 Dec 05 11:12:06 crc kubenswrapper[5014]: I1205 11:12:06.185796 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerName="rabbitmq" containerID="cri-o://9bf1e6076b04e4d05beeb349bbbbc834695a23b69f62d4d33127bd8770b6280c" gracePeriod=604796 Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.832520 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.925599 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-config-data\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.925768 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-server-conf\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.925834 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-tls\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.926887 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/909c5067-f4b6-4303-98e0-7f0763da52f9-pod-info\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.927032 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wq84j\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-kube-api-access-wq84j\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.927169 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-plugins-conf\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.927240 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-confd\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") " 
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.927563 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-plugins\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") "
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.927641 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/909c5067-f4b6-4303-98e0-7f0763da52f9-erlang-cookie-secret\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") "
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.927681 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") "
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.927768 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-erlang-cookie\") pod \"909c5067-f4b6-4303-98e0-7f0763da52f9\" (UID: \"909c5067-f4b6-4303-98e0-7f0763da52f9\") "
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.930914 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.933235 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.936756 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.938022 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.938627 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/909c5067-f4b6-4303-98e0-7f0763da52f9-pod-info" (OuterVolumeSpecName: "pod-info") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.938784 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/909c5067-f4b6-4303-98e0-7f0763da52f9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.942872 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.962687 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-kube-api-access-wq84j" (OuterVolumeSpecName: "kube-api-access-wq84j") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "kube-api-access-wq84j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.969308 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-config-data" (OuterVolumeSpecName: "config-data") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:12:11 crc kubenswrapper[5014]: I1205 11:12:11.999840 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-server-conf" (OuterVolumeSpecName: "server-conf") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030749 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030779 5014 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-server-conf\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030789 5014 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030797 5014 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/909c5067-f4b6-4303-98e0-7f0763da52f9-pod-info\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030807 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wq84j\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-kube-api-access-wq84j\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030816 5014 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/909c5067-f4b6-4303-98e0-7f0763da52f9-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030825 5014 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030834 5014 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/909c5067-f4b6-4303-98e0-7f0763da52f9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030856 5014 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.030866 5014 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.035302 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.062389 5014 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.071798 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "909c5067-f4b6-4303-98e0-7f0763da52f9" (UID: "909c5067-f4b6-4303-98e0-7f0763da52f9"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.133342 5014 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/909c5067-f4b6-4303-98e0-7f0763da52f9-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.133379 5014 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.410929 5014 generic.go:334] "Generic (PLEG): container finished" podID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerID="9bf1e6076b04e4d05beeb349bbbbc834695a23b69f62d4d33127bd8770b6280c" exitCode=0
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.411021 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9e9300c9-3a44-43c1-bbe7-d0959a35eee1","Type":"ContainerDied","Data":"9bf1e6076b04e4d05beeb349bbbbc834695a23b69f62d4d33127bd8770b6280c"}
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.414741 5014 generic.go:334] "Generic (PLEG): container finished" podID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerID="a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617" exitCode=0
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.414839 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"909c5067-f4b6-4303-98e0-7f0763da52f9","Type":"ContainerDied","Data":"a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617"}
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.414872 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"909c5067-f4b6-4303-98e0-7f0763da52f9","Type":"ContainerDied","Data":"7c186f81c044a96e47cf1dc31c175f356cc731a3c030c8d6b3d929633cad5587"}
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.414890 5014 scope.go:117] "RemoveContainer" containerID="a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.415077 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.452876 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.457601 5014 scope.go:117] "RemoveContainer" containerID="1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.500658 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.512954 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 11:12:12 crc kubenswrapper[5014]: E1205 11:12:12.513722 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerName="rabbitmq"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.513753 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerName="rabbitmq"
Dec 05 11:12:12 crc kubenswrapper[5014]: E1205 11:12:12.513778 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="906af090-57c1-4115-aa37-83ecc098261b" containerName="registry-server"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.513787 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="906af090-57c1-4115-aa37-83ecc098261b" containerName="registry-server"
Dec 05 11:12:12 crc kubenswrapper[5014]: E1205 11:12:12.513803 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="906af090-57c1-4115-aa37-83ecc098261b" containerName="extract-content"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.513813 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="906af090-57c1-4115-aa37-83ecc098261b" containerName="extract-content"
Dec 05 11:12:12 crc kubenswrapper[5014]: E1205 11:12:12.513853 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="906af090-57c1-4115-aa37-83ecc098261b" containerName="extract-utilities"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.513892 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="906af090-57c1-4115-aa37-83ecc098261b" containerName="extract-utilities"
Dec 05 11:12:12 crc kubenswrapper[5014]: E1205 11:12:12.513934 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerName="setup-container"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.513942 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerName="setup-container"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.514202 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerName="rabbitmq"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.514222 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="906af090-57c1-4115-aa37-83ecc098261b" containerName="registry-server"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.516139 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.520138 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.520232 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.523810 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.523948 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.524057 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.524186 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.524339 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-dtjpt"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.526816 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.591467 5014 scope.go:117] "RemoveContainer" containerID="a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617"
Dec 05 11:12:12 crc kubenswrapper[5014]: E1205 11:12:12.592368 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617\": container with ID starting with a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617 not found: ID does not exist" containerID="a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.592403 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617"} err="failed to get container status \"a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617\": rpc error: code = NotFound desc = could not find container \"a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617\": container with ID starting with a0ed07debf88ad3e2879c44f15bfc3d4f26d6b08eb7c538143d532a6d6371617 not found: ID does not exist"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.592428 5014 scope.go:117] "RemoveContainer" containerID="1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d"
Dec 05 11:12:12 crc kubenswrapper[5014]: E1205 11:12:12.592714 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d\": container with ID starting with 1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d not found: ID does not exist" containerID="1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.592766 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d"} err="failed to get container status \"1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d\": rpc error: code = NotFound desc = could not find container \"1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d\": container with ID starting with 1b2cffeb1b29f76d086187c733f396869a838b8e0e11ffd9ffe12707f407631d not found: ID does not exist"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648449 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46830cc1-2cdb-48ad-86a0-159b73d805c3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648532 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648578 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648613 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648682 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46830cc1-2cdb-48ad-86a0-159b73d805c3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648704 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648730 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46830cc1-2cdb-48ad-86a0-159b73d805c3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648797 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46830cc1-2cdb-48ad-86a0-159b73d805c3-config-data\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648834 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648866 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x2jt\" (UniqueName: \"kubernetes.io/projected/46830cc1-2cdb-48ad-86a0-159b73d805c3-kube-api-access-9x2jt\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.648902 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46830cc1-2cdb-48ad-86a0-159b73d805c3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750523 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46830cc1-2cdb-48ad-86a0-159b73d805c3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750581 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46830cc1-2cdb-48ad-86a0-159b73d805c3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750626 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750652 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750678 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750731 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46830cc1-2cdb-48ad-86a0-159b73d805c3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750747 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750765 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46830cc1-2cdb-48ad-86a0-159b73d805c3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750810 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46830cc1-2cdb-48ad-86a0-159b73d805c3-config-data\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750837 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.750863 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x2jt\" (UniqueName: \"kubernetes.io/projected/46830cc1-2cdb-48ad-86a0-159b73d805c3-kube-api-access-9x2jt\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.751439 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.751778 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46830cc1-2cdb-48ad-86a0-159b73d805c3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.752003 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.752008 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.752415 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46830cc1-2cdb-48ad-86a0-159b73d805c3-config-data\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.753245 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46830cc1-2cdb-48ad-86a0-159b73d805c3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.756530 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.757106 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46830cc1-2cdb-48ad-86a0-159b73d805c3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.757761 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46830cc1-2cdb-48ad-86a0-159b73d805c3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.767802 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46830cc1-2cdb-48ad-86a0-159b73d805c3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.774226 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x2jt\" (UniqueName: \"kubernetes.io/projected/46830cc1-2cdb-48ad-86a0-159b73d805c3-kube-api-access-9x2jt\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.794649 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"46830cc1-2cdb-48ad-86a0-159b73d805c3\") " pod="openstack/rabbitmq-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.853078 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.897772 5014 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953256 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-server-conf\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953350 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-erlang-cookie\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953391 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953489 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-pod-info\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953607 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72dqf\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-kube-api-access-72dqf\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953638 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-tls\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953676 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-erlang-cookie-secret\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953715 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-plugins\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953755 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-config-data\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953918 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-plugins-conf\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: 
\"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.953975 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-confd\") pod \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\" (UID: \"9e9300c9-3a44-43c1-bbe7-d0959a35eee1\") " Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.954496 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.954615 5014 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.957352 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.958883 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-pod-info" (OuterVolumeSpecName: "pod-info") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.960349 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.961975 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-kube-api-access-72dqf" (OuterVolumeSpecName: "kube-api-access-72dqf") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "kube-api-access-72dqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.962817 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.963255 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:12 crc kubenswrapper[5014]: I1205 11:12:12.968542 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.027084 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-server-conf" (OuterVolumeSpecName: "server-conf") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.035353 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-config-data" (OuterVolumeSpecName: "config-data") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.056762 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72dqf\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-kube-api-access-72dqf\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.056792 5014 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.056803 5014 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.056811 5014 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.056822 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.056830 5014 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.056841 5014 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.056871 5014 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.056882 5014 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.087237 5014 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.087384 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "9e9300c9-3a44-43c1-bbe7-d0959a35eee1" (UID: "9e9300c9-3a44-43c1-bbe7-d0959a35eee1"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.158478 5014 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9e9300c9-3a44-43c1-bbe7-d0959a35eee1-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.158810 5014 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.330361 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" path="/var/lib/kubelet/pods/909c5067-f4b6-4303-98e0-7f0763da52f9/volumes" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.449375 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.482954 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9e9300c9-3a44-43c1-bbe7-d0959a35eee1","Type":"ContainerDied","Data":"a116254899486103a8b79f5c5d89c8079b39ea20e5d37007390d55eb7ea00076"} Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.483019 5014 scope.go:117] "RemoveContainer" containerID="9bf1e6076b04e4d05beeb349bbbbc834695a23b69f62d4d33127bd8770b6280c" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.483246 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.631895 5014 scope.go:117] "RemoveContainer" containerID="f439f7814c3c9edd6130fc7a0818b8bbbf080786b75946951491dae70a49d44d" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.693593 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.714485 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.728912 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:12:13 crc kubenswrapper[5014]: E1205 11:12:13.730742 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerName="setup-container" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.730778 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerName="setup-container" Dec 05 11:12:13 crc kubenswrapper[5014]: E1205 11:12:13.730805 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerName="rabbitmq" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.730813 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerName="rabbitmq" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.731074 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" containerName="rabbitmq" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.735047 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.739923 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.741824 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.742080 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.742334 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.742585 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.743573 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.743742 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.743909 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-gvntn" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797630 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrt9c\" (UniqueName: \"kubernetes.io/projected/b16a0ee0-c10b-41b2-a636-4b066b470df6-kube-api-access-lrt9c\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797690 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797720 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b16a0ee0-c10b-41b2-a636-4b066b470df6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797769 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b16a0ee0-c10b-41b2-a636-4b066b470df6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797815 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797856 5014 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b16a0ee0-c10b-41b2-a636-4b066b470df6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797876 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797909 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797940 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b16a0ee0-c10b-41b2-a636-4b066b470df6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.797974 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b16a0ee0-c10b-41b2-a636-4b066b470df6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.798048 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.899572 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b16a0ee0-c10b-41b2-a636-4b066b470df6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.900481 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.900646 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b16a0ee0-c10b-41b2-a636-4b066b470df6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.900784 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" 
(UniqueName: \"kubernetes.io/projected/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.900950 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.901079 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b16a0ee0-c10b-41b2-a636-4b066b470df6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.901204 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b16a0ee0-c10b-41b2-a636-4b066b470df6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.901421 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.901626 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrt9c\" (UniqueName: \"kubernetes.io/projected/b16a0ee0-c10b-41b2-a636-4b066b470df6-kube-api-access-lrt9c\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.901753 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.901873 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b16a0ee0-c10b-41b2-a636-4b066b470df6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.901876 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.902105 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b16a0ee0-c10b-41b2-a636-4b066b470df6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.902430 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.900847 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.902692 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b16a0ee0-c10b-41b2-a636-4b066b470df6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.905291 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.905620 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b16a0ee0-c10b-41b2-a636-4b066b470df6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.905707 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b16a0ee0-c10b-41b2-a636-4b066b470df6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.905760 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b16a0ee0-c10b-41b2-a636-4b066b470df6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.919720 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b16a0ee0-c10b-41b2-a636-4b066b470df6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.920193 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrt9c\" (UniqueName: \"kubernetes.io/projected/b16a0ee0-c10b-41b2-a636-4b066b470df6-kube-api-access-lrt9c\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:13 crc kubenswrapper[5014]: I1205 11:12:13.952896 5014 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b16a0ee0-c10b-41b2-a636-4b066b470df6\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:14 crc kubenswrapper[5014]: I1205 11:12:14.064877 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:14 crc kubenswrapper[5014]: I1205 11:12:14.503294 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46830cc1-2cdb-48ad-86a0-159b73d805c3","Type":"ContainerStarted","Data":"b26bf1f07246aa7a1297aa473a0fa222bdff7140d4e9e964e1adb658ed218ea1"} Dec 05 11:12:14 crc kubenswrapper[5014]: I1205 11:12:14.529205 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.226705 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-xgmn5"] Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.229377 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.233386 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.242195 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-xgmn5"] Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.329536 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.329621 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-svc\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.329643 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m65w8\" (UniqueName: \"kubernetes.io/projected/7a7de019-7f78-4e56-bf30-6eb8562a05aa-kube-api-access-m65w8\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.329663 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-config\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.329702 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: 
\"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.329720 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.329759 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.333898 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e9300c9-3a44-43c1-bbe7-d0959a35eee1" path="/var/lib/kubelet/pods/9e9300c9-3a44-43c1-bbe7-d0959a35eee1/volumes" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.334736 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-xgmn5"] Dec 05 11:12:15 crc kubenswrapper[5014]: E1205 11:12:15.335396 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-m65w8 openstack-edpm-ipam ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" podUID="7a7de019-7f78-4e56-bf30-6eb8562a05aa" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.352516 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-k5tgq"] Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.354334 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.365588 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-k5tgq"] Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431308 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431392 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431454 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431477 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431499 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431531 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431552 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431601 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-config\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431626 5014 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-svc\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431685 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m65w8\" (UniqueName: \"kubernetes.io/projected/7a7de019-7f78-4e56-bf30-6eb8562a05aa-kube-api-access-m65w8\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431704 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-config\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431724 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvvsm\" (UniqueName: \"kubernetes.io/projected/d8a409e0-f594-4164-950f-c1285bf165af-kube-api-access-xvvsm\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431781 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.431801 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.435532 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.436125 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.436343 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.436376 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.436986 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-config\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.437772 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-svc\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.457664 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m65w8\" (UniqueName: \"kubernetes.io/projected/7a7de019-7f78-4e56-bf30-6eb8562a05aa-kube-api-access-m65w8\") pod \"dnsmasq-dns-5576978c7c-xgmn5\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.520945 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b16a0ee0-c10b-41b2-a636-4b066b470df6","Type":"ContainerStarted","Data":"3e1405f9e1457ed942e214971c0260fa78eaf2772f772b7227583f8d0afa5446"} Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.521023 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.533741 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-config\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.533838 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvvsm\" (UniqueName: \"kubernetes.io/projected/d8a409e0-f594-4164-950f-c1285bf165af-kube-api-access-xvvsm\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.533929 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.533984 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.534002 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.534073 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.534100 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.535183 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.535239 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 
11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.535388 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.535444 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.535964 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.535990 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8a409e0-f594-4164-950f-c1285bf165af-config\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.537935 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.584149 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvvsm\" (UniqueName: \"kubernetes.io/projected/d8a409e0-f594-4164-950f-c1285bf165af-kube-api-access-xvvsm\") pod \"dnsmasq-dns-8c6f6df99-k5tgq\" (UID: \"d8a409e0-f594-4164-950f-c1285bf165af\") " pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.635998 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-config\") pod \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.636242 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-sb\") pod \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.636362 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-openstack-edpm-ipam\") pod \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.636476 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-svc\") pod \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.636515 5014 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-swift-storage-0\") pod \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.636605 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-config" (OuterVolumeSpecName: "config") pod "7a7de019-7f78-4e56-bf30-6eb8562a05aa" (UID: "7a7de019-7f78-4e56-bf30-6eb8562a05aa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.636747 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-nb\") pod \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.636799 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m65w8\" (UniqueName: \"kubernetes.io/projected/7a7de019-7f78-4e56-bf30-6eb8562a05aa-kube-api-access-m65w8\") pod \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\" (UID: \"7a7de019-7f78-4e56-bf30-6eb8562a05aa\") " Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.636786 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7a7de019-7f78-4e56-bf30-6eb8562a05aa" (UID: "7a7de019-7f78-4e56-bf30-6eb8562a05aa"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.636866 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "7a7de019-7f78-4e56-bf30-6eb8562a05aa" (UID: "7a7de019-7f78-4e56-bf30-6eb8562a05aa"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.637024 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7a7de019-7f78-4e56-bf30-6eb8562a05aa" (UID: "7a7de019-7f78-4e56-bf30-6eb8562a05aa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.637055 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7a7de019-7f78-4e56-bf30-6eb8562a05aa" (UID: "7a7de019-7f78-4e56-bf30-6eb8562a05aa"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.637188 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7a7de019-7f78-4e56-bf30-6eb8562a05aa" (UID: "7a7de019-7f78-4e56-bf30-6eb8562a05aa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.638043 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.638074 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.638087 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.638101 5014 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.638114 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.638126 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a7de019-7f78-4e56-bf30-6eb8562a05aa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.644946 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a7de019-7f78-4e56-bf30-6eb8562a05aa-kube-api-access-m65w8" (OuterVolumeSpecName: "kube-api-access-m65w8") pod "7a7de019-7f78-4e56-bf30-6eb8562a05aa" (UID: "7a7de019-7f78-4e56-bf30-6eb8562a05aa"). InnerVolumeSpecName "kube-api-access-m65w8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.687857 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:15 crc kubenswrapper[5014]: I1205 11:12:15.740648 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m65w8\" (UniqueName: \"kubernetes.io/projected/7a7de019-7f78-4e56-bf30-6eb8562a05aa-kube-api-access-m65w8\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:16 crc kubenswrapper[5014]: I1205 11:12:16.184371 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-k5tgq"] Dec 05 11:12:16 crc kubenswrapper[5014]: I1205 11:12:16.536666 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46830cc1-2cdb-48ad-86a0-159b73d805c3","Type":"ContainerStarted","Data":"22493bcc025d5d34b066f39a0258ef0278fc9eef4f118a4e67c1a706a3924a6c"} Dec 05 11:12:16 crc kubenswrapper[5014]: I1205 11:12:16.538770 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-xgmn5" Dec 05 11:12:16 crc kubenswrapper[5014]: I1205 11:12:16.538702 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" event={"ID":"d8a409e0-f594-4164-950f-c1285bf165af","Type":"ContainerStarted","Data":"4a29a9fdfd35691850b4ae3c09c1b600d9d527f1ee38cf0c77bbbcaa86f576aa"} Dec 05 11:12:16 crc kubenswrapper[5014]: I1205 11:12:16.605782 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-xgmn5"] Dec 05 11:12:16 crc kubenswrapper[5014]: I1205 11:12:16.615886 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-xgmn5"] Dec 05 11:12:16 crc kubenswrapper[5014]: I1205 11:12:16.718652 5014 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="909c5067-f4b6-4303-98e0-7f0763da52f9" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.97:5671: i/o timeout" Dec 05 11:12:17 crc kubenswrapper[5014]: I1205 11:12:17.328881 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a7de019-7f78-4e56-bf30-6eb8562a05aa" path="/var/lib/kubelet/pods/7a7de019-7f78-4e56-bf30-6eb8562a05aa/volumes" Dec 05 11:12:17 crc kubenswrapper[5014]: I1205 11:12:17.549340 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b16a0ee0-c10b-41b2-a636-4b066b470df6","Type":"ContainerStarted","Data":"e59df2552962e759778882a1fdb3518659f93ee4c41475b878ca49389b78f244"} Dec 05 11:12:17 crc kubenswrapper[5014]: I1205 11:12:17.552169 5014 generic.go:334] "Generic (PLEG): container finished" podID="d8a409e0-f594-4164-950f-c1285bf165af" containerID="e7a7975d590808a56c89b7eb4d225a9d1ae8994d6bf9cf58bb0917b07741e77a" exitCode=0 Dec 05 11:12:17 crc kubenswrapper[5014]: I1205 11:12:17.552217 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" event={"ID":"d8a409e0-f594-4164-950f-c1285bf165af","Type":"ContainerDied","Data":"e7a7975d590808a56c89b7eb4d225a9d1ae8994d6bf9cf58bb0917b07741e77a"} Dec 05 11:12:18 crc kubenswrapper[5014]: I1205 11:12:18.562961 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" event={"ID":"d8a409e0-f594-4164-950f-c1285bf165af","Type":"ContainerStarted","Data":"38858b495d3c46d96ea4c70be235955e9d59678e65d1a9ed8200f1a1ebeae985"} Dec 05 11:12:18 crc kubenswrapper[5014]: I1205 11:12:18.593947 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" podStartSLOduration=3.593923592 podStartE2EDuration="3.593923592s" podCreationTimestamp="2025-12-05 11:12:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:12:18.585639099 +0000 UTC m=+1465.533756823" watchObservedRunningTime="2025-12-05 11:12:18.593923592 +0000 UTC m=+1465.542041296" Dec 05 11:12:19 crc kubenswrapper[5014]: I1205 11:12:19.570234 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:25 crc kubenswrapper[5014]: I1205 11:12:25.690505 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8c6f6df99-k5tgq" Dec 05 11:12:25 crc kubenswrapper[5014]: I1205 11:12:25.767297 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"] Dec 05 11:12:25 crc kubenswrapper[5014]: I1205 11:12:25.767599 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" podUID="5e9b17e6-f16e-4370-9278-61584a2b96a4" containerName="dnsmasq-dns" containerID="cri-o://0eedc8b3521cfb61af67c39600b8661a2045966cbe3d4ccb6a8d36220c715b6a" gracePeriod=10 Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.646544 5014 generic.go:334] "Generic (PLEG): container finished" podID="5e9b17e6-f16e-4370-9278-61584a2b96a4" containerID="0eedc8b3521cfb61af67c39600b8661a2045966cbe3d4ccb6a8d36220c715b6a" exitCode=0 Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.646641 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" event={"ID":"5e9b17e6-f16e-4370-9278-61584a2b96a4","Type":"ContainerDied","Data":"0eedc8b3521cfb61af67c39600b8661a2045966cbe3d4ccb6a8d36220c715b6a"} Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.772100 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.858525 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-swift-storage-0\") pod \"5e9b17e6-f16e-4370-9278-61584a2b96a4\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.858584 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkclt\" (UniqueName: \"kubernetes.io/projected/5e9b17e6-f16e-4370-9278-61584a2b96a4-kube-api-access-jkclt\") pod \"5e9b17e6-f16e-4370-9278-61584a2b96a4\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.858792 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-nb\") pod \"5e9b17e6-f16e-4370-9278-61584a2b96a4\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.858860 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-config\") pod \"5e9b17e6-f16e-4370-9278-61584a2b96a4\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.858946 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-sb\") pod \"5e9b17e6-f16e-4370-9278-61584a2b96a4\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.859371 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-svc\") pod \"5e9b17e6-f16e-4370-9278-61584a2b96a4\" (UID: \"5e9b17e6-f16e-4370-9278-61584a2b96a4\") " Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.869414 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e9b17e6-f16e-4370-9278-61584a2b96a4-kube-api-access-jkclt" (OuterVolumeSpecName: "kube-api-access-jkclt") pod "5e9b17e6-f16e-4370-9278-61584a2b96a4" (UID: "5e9b17e6-f16e-4370-9278-61584a2b96a4"). InnerVolumeSpecName "kube-api-access-jkclt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.917084 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5e9b17e6-f16e-4370-9278-61584a2b96a4" (UID: "5e9b17e6-f16e-4370-9278-61584a2b96a4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.918008 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5e9b17e6-f16e-4370-9278-61584a2b96a4" (UID: "5e9b17e6-f16e-4370-9278-61584a2b96a4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.920989 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5e9b17e6-f16e-4370-9278-61584a2b96a4" (UID: "5e9b17e6-f16e-4370-9278-61584a2b96a4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.930918 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5e9b17e6-f16e-4370-9278-61584a2b96a4" (UID: "5e9b17e6-f16e-4370-9278-61584a2b96a4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.931033 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-config" (OuterVolumeSpecName: "config") pod "5e9b17e6-f16e-4370-9278-61584a2b96a4" (UID: "5e9b17e6-f16e-4370-9278-61584a2b96a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.961384 5014 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.961418 5014 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.961428 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkclt\" (UniqueName: \"kubernetes.io/projected/5e9b17e6-f16e-4370-9278-61584a2b96a4-kube-api-access-jkclt\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.961437 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.961446 5014 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:26 crc kubenswrapper[5014]: I1205 11:12:26.961454 5014 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e9b17e6-f16e-4370-9278-61584a2b96a4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:27 crc kubenswrapper[5014]: I1205 11:12:27.657315 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" event={"ID":"5e9b17e6-f16e-4370-9278-61584a2b96a4","Type":"ContainerDied","Data":"e306871ffe6fd2ae9cf686f8df89cc512f0e0d2942274c97a3dd43d9fd8fd63f"} Dec 05 11:12:27 crc kubenswrapper[5014]: I1205 11:12:27.657372 5014 scope.go:117] "RemoveContainer" containerID="0eedc8b3521cfb61af67c39600b8661a2045966cbe3d4ccb6a8d36220c715b6a" Dec 05 11:12:27 crc kubenswrapper[5014]: I1205 11:12:27.657371 5014 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-nv7fd" Dec 05 11:12:27 crc kubenswrapper[5014]: I1205 11:12:27.681651 5014 scope.go:117] "RemoveContainer" containerID="ecf2ff395c965b23064519d1deed5476eb4476d8a9765de92e733830dac78e76" Dec 05 11:12:27 crc kubenswrapper[5014]: I1205 11:12:27.686047 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"] Dec 05 11:12:27 crc kubenswrapper[5014]: I1205 11:12:27.695339 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-nv7fd"] Dec 05 11:12:29 crc kubenswrapper[5014]: I1205 11:12:29.329021 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e9b17e6-f16e-4370-9278-61584a2b96a4" path="/var/lib/kubelet/pods/5e9b17e6-f16e-4370-9278-61584a2b96a4/volumes" Dec 05 11:12:32 crc kubenswrapper[5014]: I1205 11:12:32.937109 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:12:32 crc kubenswrapper[5014]: I1205 11:12:32.937725 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:12:32 crc kubenswrapper[5014]: I1205 11:12:32.937774 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 11:12:32 crc kubenswrapper[5014]: I1205 11:12:32.938655 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0d6d61cc21a88a778a6896c0ce3a742c000804cdd014c81b67f82fc215c25138"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:12:32 crc kubenswrapper[5014]: I1205 11:12:32.938727 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://0d6d61cc21a88a778a6896c0ce3a742c000804cdd014c81b67f82fc215c25138" gracePeriod=600 Dec 05 11:12:33 crc kubenswrapper[5014]: I1205 11:12:33.717131 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="0d6d61cc21a88a778a6896c0ce3a742c000804cdd014c81b67f82fc215c25138" exitCode=0 Dec 05 11:12:33 crc kubenswrapper[5014]: I1205 11:12:33.717162 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"0d6d61cc21a88a778a6896c0ce3a742c000804cdd014c81b67f82fc215c25138"} Dec 05 11:12:33 crc kubenswrapper[5014]: I1205 11:12:33.717503 5014 scope.go:117] "RemoveContainer" containerID="e69f8ff3539ebba47e81dc1689f38b27a404e4706e334acbe1fa267156045c14" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.467625 5014 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9"] Dec 05 11:12:34 crc kubenswrapper[5014]: E1205 11:12:34.468846 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e9b17e6-f16e-4370-9278-61584a2b96a4" containerName="init" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.468869 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e9b17e6-f16e-4370-9278-61584a2b96a4" containerName="init" Dec 05 11:12:34 crc kubenswrapper[5014]: E1205 11:12:34.468903 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e9b17e6-f16e-4370-9278-61584a2b96a4" containerName="dnsmasq-dns" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.468911 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e9b17e6-f16e-4370-9278-61584a2b96a4" containerName="dnsmasq-dns" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.469165 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e9b17e6-f16e-4370-9278-61584a2b96a4" containerName="dnsmasq-dns" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.470069 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.479849 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.479986 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.480511 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.481653 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.494669 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9"] Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.618240 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.618310 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.618339 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drcpm\" (UniqueName: \"kubernetes.io/projected/6a880be3-7a1f-4e62-9603-9469947923ce-kube-api-access-drcpm\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 
11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.618663 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.720424 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.720586 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.720623 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.720658 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drcpm\" (UniqueName: \"kubernetes.io/projected/6a880be3-7a1f-4e62-9603-9469947923ce-kube-api-access-drcpm\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.727539 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.729722 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"} Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.729995 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.731260 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.741306 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drcpm\" (UniqueName: \"kubernetes.io/projected/6a880be3-7a1f-4e62-9603-9469947923ce-kube-api-access-drcpm\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:34 crc kubenswrapper[5014]: I1205 11:12:34.791611 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:35 crc kubenswrapper[5014]: I1205 11:12:35.332003 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9"] Dec 05 11:12:35 crc kubenswrapper[5014]: W1205 11:12:35.334096 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a880be3_7a1f_4e62_9603_9469947923ce.slice/crio-5fb56d67a4fab2a9cd7e1d8d31aa1bafc756d3d1a45e134a5b685da792851a0b WatchSource:0}: Error finding container 5fb56d67a4fab2a9cd7e1d8d31aa1bafc756d3d1a45e134a5b685da792851a0b: Status 404 returned error can't find the container with id 5fb56d67a4fab2a9cd7e1d8d31aa1bafc756d3d1a45e134a5b685da792851a0b Dec 05 11:12:35 crc kubenswrapper[5014]: I1205 11:12:35.746519 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" event={"ID":"6a880be3-7a1f-4e62-9603-9469947923ce","Type":"ContainerStarted","Data":"5fb56d67a4fab2a9cd7e1d8d31aa1bafc756d3d1a45e134a5b685da792851a0b"} Dec 05 11:12:44 crc kubenswrapper[5014]: I1205 11:12:44.849629 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" event={"ID":"6a880be3-7a1f-4e62-9603-9469947923ce","Type":"ContainerStarted","Data":"3e1e7812b29b7418cf3ad0dbda21bdea24e632c89680dd62b58e96e51caca922"} Dec 05 11:12:48 crc kubenswrapper[5014]: I1205 11:12:48.883337 5014 generic.go:334] "Generic (PLEG): container finished" podID="46830cc1-2cdb-48ad-86a0-159b73d805c3" containerID="22493bcc025d5d34b066f39a0258ef0278fc9eef4f118a4e67c1a706a3924a6c" exitCode=0 Dec 05 11:12:48 crc kubenswrapper[5014]: I1205 11:12:48.883396 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46830cc1-2cdb-48ad-86a0-159b73d805c3","Type":"ContainerDied","Data":"22493bcc025d5d34b066f39a0258ef0278fc9eef4f118a4e67c1a706a3924a6c"} Dec 05 11:12:48 crc kubenswrapper[5014]: I1205 11:12:48.914491 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" podStartSLOduration=6.416776915 podStartE2EDuration="14.914474085s" podCreationTimestamp="2025-12-05 11:12:34 +0000 UTC" firstStartedPulling="2025-12-05 11:12:35.336504679 +0000 UTC m=+1482.284622383" lastFinishedPulling="2025-12-05 11:12:43.834201849 +0000 UTC m=+1490.782319553" observedRunningTime="2025-12-05 11:12:44.8698374 +0000 UTC m=+1491.817955104" watchObservedRunningTime="2025-12-05 11:12:48.914474085 +0000 UTC m=+1495.862591789" Dec 05 11:12:49 crc 
kubenswrapper[5014]: I1205 11:12:49.894090 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46830cc1-2cdb-48ad-86a0-159b73d805c3","Type":"ContainerStarted","Data":"ed7456c5a85868020339c14b8171cf58b0f1496a116caca56cd2c671f70cdb15"} Dec 05 11:12:49 crc kubenswrapper[5014]: I1205 11:12:49.894640 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 11:12:49 crc kubenswrapper[5014]: I1205 11:12:49.895870 5014 generic.go:334] "Generic (PLEG): container finished" podID="b16a0ee0-c10b-41b2-a636-4b066b470df6" containerID="e59df2552962e759778882a1fdb3518659f93ee4c41475b878ca49389b78f244" exitCode=0 Dec 05 11:12:49 crc kubenswrapper[5014]: I1205 11:12:49.895897 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b16a0ee0-c10b-41b2-a636-4b066b470df6","Type":"ContainerDied","Data":"e59df2552962e759778882a1fdb3518659f93ee4c41475b878ca49389b78f244"} Dec 05 11:12:49 crc kubenswrapper[5014]: I1205 11:12:49.927774 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.927748464 podStartE2EDuration="37.927748464s" podCreationTimestamp="2025-12-05 11:12:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:12:49.914031739 +0000 UTC m=+1496.862149453" watchObservedRunningTime="2025-12-05 11:12:49.927748464 +0000 UTC m=+1496.875866168" Dec 05 11:12:50 crc kubenswrapper[5014]: I1205 11:12:50.907932 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b16a0ee0-c10b-41b2-a636-4b066b470df6","Type":"ContainerStarted","Data":"81dcfaeb55e469ba2dec157c202b874608b7e0c11994ffe6e972a6719aee462e"} Dec 05 11:12:50 crc kubenswrapper[5014]: I1205 11:12:50.908457 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 11:12:50 crc kubenswrapper[5014]: I1205 11:12:50.940389 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.940370527 podStartE2EDuration="37.940370527s" podCreationTimestamp="2025-12-05 11:12:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:12:50.930800753 +0000 UTC m=+1497.878918477" watchObservedRunningTime="2025-12-05 11:12:50.940370527 +0000 UTC m=+1497.888488231" Dec 05 11:12:56 crc kubenswrapper[5014]: I1205 11:12:56.960940 5014 generic.go:334] "Generic (PLEG): container finished" podID="6a880be3-7a1f-4e62-9603-9469947923ce" containerID="3e1e7812b29b7418cf3ad0dbda21bdea24e632c89680dd62b58e96e51caca922" exitCode=0 Dec 05 11:12:56 crc kubenswrapper[5014]: I1205 11:12:56.961026 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" event={"ID":"6a880be3-7a1f-4e62-9603-9469947923ce","Type":"ContainerDied","Data":"3e1e7812b29b7418cf3ad0dbda21bdea24e632c89680dd62b58e96e51caca922"} Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.401813 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.529841 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-ssh-key\") pod \"6a880be3-7a1f-4e62-9603-9469947923ce\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.530214 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-inventory\") pod \"6a880be3-7a1f-4e62-9603-9469947923ce\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.530298 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-repo-setup-combined-ca-bundle\") pod \"6a880be3-7a1f-4e62-9603-9469947923ce\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.530341 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drcpm\" (UniqueName: \"kubernetes.io/projected/6a880be3-7a1f-4e62-9603-9469947923ce-kube-api-access-drcpm\") pod \"6a880be3-7a1f-4e62-9603-9469947923ce\" (UID: \"6a880be3-7a1f-4e62-9603-9469947923ce\") " Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.535295 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a880be3-7a1f-4e62-9603-9469947923ce-kube-api-access-drcpm" (OuterVolumeSpecName: "kube-api-access-drcpm") pod "6a880be3-7a1f-4e62-9603-9469947923ce" (UID: "6a880be3-7a1f-4e62-9603-9469947923ce"). InnerVolumeSpecName "kube-api-access-drcpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.535860 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "6a880be3-7a1f-4e62-9603-9469947923ce" (UID: "6a880be3-7a1f-4e62-9603-9469947923ce"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.558608 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6a880be3-7a1f-4e62-9603-9469947923ce" (UID: "6a880be3-7a1f-4e62-9603-9469947923ce"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.560423 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-inventory" (OuterVolumeSpecName: "inventory") pod "6a880be3-7a1f-4e62-9603-9469947923ce" (UID: "6a880be3-7a1f-4e62-9603-9469947923ce"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.632695 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.632750 5014 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.632771 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drcpm\" (UniqueName: \"kubernetes.io/projected/6a880be3-7a1f-4e62-9603-9469947923ce-kube-api-access-drcpm\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:58 crc kubenswrapper[5014]: I1205 11:12:58.632793 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a880be3-7a1f-4e62-9603-9469947923ce-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.014630 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" event={"ID":"6a880be3-7a1f-4e62-9603-9469947923ce","Type":"ContainerDied","Data":"5fb56d67a4fab2a9cd7e1d8d31aa1bafc756d3d1a45e134a5b685da792851a0b"} Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.014677 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5fb56d67a4fab2a9cd7e1d8d31aa1bafc756d3d1a45e134a5b685da792851a0b" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.014731 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.071613 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv"] Dec 05 11:12:59 crc kubenswrapper[5014]: E1205 11:12:59.072026 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a880be3-7a1f-4e62-9603-9469947923ce" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.072047 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a880be3-7a1f-4e62-9603-9469947923ce" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.072241 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a880be3-7a1f-4e62-9603-9469947923ce" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.072948 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.075300 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.075422 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.075695 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.077095 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.087773 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv"] Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.141054 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-zm7rv\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.141177 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-zm7rv\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.141213 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vshvb\" (UniqueName: \"kubernetes.io/projected/6eac20dd-3e47-46e2-91fd-c684094b8d74-kube-api-access-vshvb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-zm7rv\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.243718 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-zm7rv\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.243872 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-zm7rv\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.243907 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vshvb\" (UniqueName: \"kubernetes.io/projected/6eac20dd-3e47-46e2-91fd-c684094b8d74-kube-api-access-vshvb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-zm7rv\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.247436 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-zm7rv\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.247826 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-zm7rv\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.262264 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vshvb\" (UniqueName: \"kubernetes.io/projected/6eac20dd-3e47-46e2-91fd-c684094b8d74-kube-api-access-vshvb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-zm7rv\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:12:59 crc kubenswrapper[5014]: I1205 11:12:59.390905 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:13:00 crc kubenswrapper[5014]: I1205 11:13:00.259034 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv"] Dec 05 11:13:01 crc kubenswrapper[5014]: I1205 11:13:01.040807 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" event={"ID":"6eac20dd-3e47-46e2-91fd-c684094b8d74","Type":"ContainerStarted","Data":"d58e9dc93efed4a0ac798fcd2b6137884e0b308bc7f4438f48ebc3bce492098a"} Dec 05 11:13:02 crc kubenswrapper[5014]: I1205 11:13:02.052572 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" event={"ID":"6eac20dd-3e47-46e2-91fd-c684094b8d74","Type":"ContainerStarted","Data":"41e08dad5e5079b50ac6e75ee368b009fc48410f0a12ad1a43664c0b99f3d833"} Dec 05 11:13:02 crc kubenswrapper[5014]: I1205 11:13:02.076079 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" podStartSLOduration=2.438300775 podStartE2EDuration="3.076061573s" podCreationTimestamp="2025-12-05 11:12:59 +0000 UTC" firstStartedPulling="2025-12-05 11:13:00.267097009 +0000 UTC m=+1507.215214713" lastFinishedPulling="2025-12-05 11:13:00.904857807 +0000 UTC m=+1507.852975511" observedRunningTime="2025-12-05 11:13:02.073730165 +0000 UTC m=+1509.021847879" watchObservedRunningTime="2025-12-05 11:13:02.076061573 +0000 UTC m=+1509.024179277" Dec 05 11:13:02 crc kubenswrapper[5014]: I1205 11:13:02.906363 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 11:13:03 crc kubenswrapper[5014]: I1205 11:13:03.457011 5014 scope.go:117] "RemoveContainer" containerID="fa5598192edba6c6f74c8dec204a252c425ede2d7006b6d170640d7400d6273d" Dec 05 11:13:04 crc kubenswrapper[5014]: I1205 11:13:04.067730 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 
11:13:04 crc kubenswrapper[5014]: I1205 11:13:04.072520 5014 generic.go:334] "Generic (PLEG): container finished" podID="6eac20dd-3e47-46e2-91fd-c684094b8d74" containerID="41e08dad5e5079b50ac6e75ee368b009fc48410f0a12ad1a43664c0b99f3d833" exitCode=0 Dec 05 11:13:04 crc kubenswrapper[5014]: I1205 11:13:04.072561 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" event={"ID":"6eac20dd-3e47-46e2-91fd-c684094b8d74","Type":"ContainerDied","Data":"41e08dad5e5079b50ac6e75ee368b009fc48410f0a12ad1a43664c0b99f3d833"} Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.531263 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.689045 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-inventory\") pod \"6eac20dd-3e47-46e2-91fd-c684094b8d74\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.689142 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-ssh-key\") pod \"6eac20dd-3e47-46e2-91fd-c684094b8d74\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.689303 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vshvb\" (UniqueName: \"kubernetes.io/projected/6eac20dd-3e47-46e2-91fd-c684094b8d74-kube-api-access-vshvb\") pod \"6eac20dd-3e47-46e2-91fd-c684094b8d74\" (UID: \"6eac20dd-3e47-46e2-91fd-c684094b8d74\") " Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.694843 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eac20dd-3e47-46e2-91fd-c684094b8d74-kube-api-access-vshvb" (OuterVolumeSpecName: "kube-api-access-vshvb") pod "6eac20dd-3e47-46e2-91fd-c684094b8d74" (UID: "6eac20dd-3e47-46e2-91fd-c684094b8d74"). InnerVolumeSpecName "kube-api-access-vshvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.716840 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6eac20dd-3e47-46e2-91fd-c684094b8d74" (UID: "6eac20dd-3e47-46e2-91fd-c684094b8d74"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.717249 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-inventory" (OuterVolumeSpecName: "inventory") pod "6eac20dd-3e47-46e2-91fd-c684094b8d74" (UID: "6eac20dd-3e47-46e2-91fd-c684094b8d74"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.794081 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.794116 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6eac20dd-3e47-46e2-91fd-c684094b8d74-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:05 crc kubenswrapper[5014]: I1205 11:13:05.794126 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vshvb\" (UniqueName: \"kubernetes.io/projected/6eac20dd-3e47-46e2-91fd-c684094b8d74-kube-api-access-vshvb\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.093147 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" event={"ID":"6eac20dd-3e47-46e2-91fd-c684094b8d74","Type":"ContainerDied","Data":"d58e9dc93efed4a0ac798fcd2b6137884e0b308bc7f4438f48ebc3bce492098a"} Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.093189 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d58e9dc93efed4a0ac798fcd2b6137884e0b308bc7f4438f48ebc3bce492098a" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.093239 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-zm7rv" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.182240 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr"] Dec 05 11:13:06 crc kubenswrapper[5014]: E1205 11:13:06.183000 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eac20dd-3e47-46e2-91fd-c684094b8d74" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.183036 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eac20dd-3e47-46e2-91fd-c684094b8d74" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.183359 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eac20dd-3e47-46e2-91fd-c684094b8d74" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.184534 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.187160 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.187212 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.187674 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.187729 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.201803 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.201851 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqljb\" (UniqueName: \"kubernetes.io/projected/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-kube-api-access-nqljb\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.201872 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.202022 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.204239 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr"] Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.303637 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.303821 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.303898 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.303922 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqljb\" (UniqueName: \"kubernetes.io/projected/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-kube-api-access-nqljb\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.307812 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.308340 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.309120 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.321399 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqljb\" (UniqueName: \"kubernetes.io/projected/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-kube-api-access-nqljb\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:06 crc kubenswrapper[5014]: I1205 11:13:06.500199 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:13:07 crc kubenswrapper[5014]: I1205 11:13:07.027748 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr"] Dec 05 11:13:07 crc kubenswrapper[5014]: I1205 11:13:07.104477 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" event={"ID":"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a","Type":"ContainerStarted","Data":"2f1a15ea8bb8f638a2e5cd0bdbad0a56480a6f98b3256076b393bf8b8447c6bb"} Dec 05 11:13:08 crc kubenswrapper[5014]: I1205 11:13:08.130522 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" event={"ID":"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a","Type":"ContainerStarted","Data":"71b298f90920e2f487cb677723a064edbbda8f2f274d7e5e0b3c4a0597daee26"} Dec 05 11:13:08 crc kubenswrapper[5014]: I1205 11:13:08.155098 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" podStartSLOduration=1.649747558 podStartE2EDuration="2.155075741s" podCreationTimestamp="2025-12-05 11:13:06 +0000 UTC" firstStartedPulling="2025-12-05 11:13:07.032774879 +0000 UTC m=+1513.980892593" lastFinishedPulling="2025-12-05 11:13:07.538103072 +0000 UTC m=+1514.486220776" observedRunningTime="2025-12-05 11:13:08.151461633 +0000 UTC m=+1515.099579387" watchObservedRunningTime="2025-12-05 11:13:08.155075741 +0000 UTC m=+1515.103193455" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.106115 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xkvsk"] Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.109211 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.136534 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xkvsk"] Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.248145 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-utilities\") pod \"community-operators-xkvsk\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.248218 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-catalog-content\") pod \"community-operators-xkvsk\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.248302 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj7vn\" (UniqueName: \"kubernetes.io/projected/373da960-3448-4c98-ab58-a82fa3816b8c-kube-api-access-dj7vn\") pod \"community-operators-xkvsk\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.350212 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-utilities\") pod \"community-operators-xkvsk\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.350610 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-catalog-content\") pod \"community-operators-xkvsk\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.350798 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj7vn\" (UniqueName: \"kubernetes.io/projected/373da960-3448-4c98-ab58-a82fa3816b8c-kube-api-access-dj7vn\") pod \"community-operators-xkvsk\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.350890 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-utilities\") pod \"community-operators-xkvsk\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.351063 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-catalog-content\") pod \"community-operators-xkvsk\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.386620 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dj7vn\" (UniqueName: \"kubernetes.io/projected/373da960-3448-4c98-ab58-a82fa3816b8c-kube-api-access-dj7vn\") pod \"community-operators-xkvsk\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:23 crc kubenswrapper[5014]: I1205 11:13:23.434390 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:24 crc kubenswrapper[5014]: I1205 11:13:24.019591 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xkvsk"] Dec 05 11:13:24 crc kubenswrapper[5014]: I1205 11:13:24.289399 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xkvsk" event={"ID":"373da960-3448-4c98-ab58-a82fa3816b8c","Type":"ContainerStarted","Data":"83cd41ddb493f9a1a0c4ff268e69ba067953b7133bc96446d1b606765d6043f3"} Dec 05 11:13:25 crc kubenswrapper[5014]: I1205 11:13:25.303099 5014 generic.go:334] "Generic (PLEG): container finished" podID="373da960-3448-4c98-ab58-a82fa3816b8c" containerID="57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce" exitCode=0 Dec 05 11:13:25 crc kubenswrapper[5014]: I1205 11:13:25.303208 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xkvsk" event={"ID":"373da960-3448-4c98-ab58-a82fa3816b8c","Type":"ContainerDied","Data":"57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce"} Dec 05 11:13:26 crc kubenswrapper[5014]: I1205 11:13:26.318725 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xkvsk" event={"ID":"373da960-3448-4c98-ab58-a82fa3816b8c","Type":"ContainerStarted","Data":"2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef"} Dec 05 11:13:27 crc kubenswrapper[5014]: I1205 11:13:27.329119 5014 generic.go:334] "Generic (PLEG): container finished" podID="373da960-3448-4c98-ab58-a82fa3816b8c" containerID="2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef" exitCode=0 Dec 05 11:13:27 crc kubenswrapper[5014]: I1205 11:13:27.334324 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xkvsk" event={"ID":"373da960-3448-4c98-ab58-a82fa3816b8c","Type":"ContainerDied","Data":"2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef"} Dec 05 11:13:28 crc kubenswrapper[5014]: I1205 11:13:28.357572 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xkvsk" event={"ID":"373da960-3448-4c98-ab58-a82fa3816b8c","Type":"ContainerStarted","Data":"bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea"} Dec 05 11:13:33 crc kubenswrapper[5014]: I1205 11:13:33.434651 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:33 crc kubenswrapper[5014]: I1205 11:13:33.435244 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:33 crc kubenswrapper[5014]: I1205 11:13:33.488186 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:33 crc kubenswrapper[5014]: I1205 11:13:33.507642 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-xkvsk" podStartSLOduration=7.811481586 podStartE2EDuration="10.507615219s" podCreationTimestamp="2025-12-05 11:13:23 +0000 UTC" firstStartedPulling="2025-12-05 11:13:25.307685688 +0000 UTC m=+1532.255803392" lastFinishedPulling="2025-12-05 11:13:28.003819321 +0000 UTC m=+1534.951937025" observedRunningTime="2025-12-05 11:13:28.377936698 +0000 UTC m=+1535.326054402" watchObservedRunningTime="2025-12-05 11:13:33.507615219 +0000 UTC m=+1540.455732923" Dec 05 11:13:34 crc kubenswrapper[5014]: I1205 11:13:34.478855 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:34 crc kubenswrapper[5014]: I1205 11:13:34.529289 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xkvsk"] Dec 05 11:13:36 crc kubenswrapper[5014]: I1205 11:13:36.433576 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xkvsk" podUID="373da960-3448-4c98-ab58-a82fa3816b8c" containerName="registry-server" containerID="cri-o://bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea" gracePeriod=2 Dec 05 11:13:36 crc kubenswrapper[5014]: I1205 11:13:36.904896 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.024529 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dj7vn\" (UniqueName: \"kubernetes.io/projected/373da960-3448-4c98-ab58-a82fa3816b8c-kube-api-access-dj7vn\") pod \"373da960-3448-4c98-ab58-a82fa3816b8c\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.024599 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-utilities\") pod \"373da960-3448-4c98-ab58-a82fa3816b8c\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.024792 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-catalog-content\") pod \"373da960-3448-4c98-ab58-a82fa3816b8c\" (UID: \"373da960-3448-4c98-ab58-a82fa3816b8c\") " Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.025844 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-utilities" (OuterVolumeSpecName: "utilities") pod "373da960-3448-4c98-ab58-a82fa3816b8c" (UID: "373da960-3448-4c98-ab58-a82fa3816b8c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.030320 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/373da960-3448-4c98-ab58-a82fa3816b8c-kube-api-access-dj7vn" (OuterVolumeSpecName: "kube-api-access-dj7vn") pod "373da960-3448-4c98-ab58-a82fa3816b8c" (UID: "373da960-3448-4c98-ab58-a82fa3816b8c"). InnerVolumeSpecName "kube-api-access-dj7vn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.079739 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "373da960-3448-4c98-ab58-a82fa3816b8c" (UID: "373da960-3448-4c98-ab58-a82fa3816b8c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.127425 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dj7vn\" (UniqueName: \"kubernetes.io/projected/373da960-3448-4c98-ab58-a82fa3816b8c-kube-api-access-dj7vn\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.127475 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.127493 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/373da960-3448-4c98-ab58-a82fa3816b8c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.447876 5014 generic.go:334] "Generic (PLEG): container finished" podID="373da960-3448-4c98-ab58-a82fa3816b8c" containerID="bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea" exitCode=0 Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.447936 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xkvsk" event={"ID":"373da960-3448-4c98-ab58-a82fa3816b8c","Type":"ContainerDied","Data":"bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea"} Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.448003 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xkvsk" event={"ID":"373da960-3448-4c98-ab58-a82fa3816b8c","Type":"ContainerDied","Data":"83cd41ddb493f9a1a0c4ff268e69ba067953b7133bc96446d1b606765d6043f3"} Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.448000 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xkvsk" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.448023 5014 scope.go:117] "RemoveContainer" containerID="bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.479078 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xkvsk"] Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.479799 5014 scope.go:117] "RemoveContainer" containerID="2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.489558 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xkvsk"] Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.512493 5014 scope.go:117] "RemoveContainer" containerID="57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.557494 5014 scope.go:117] "RemoveContainer" containerID="bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea" Dec 05 11:13:37 crc kubenswrapper[5014]: E1205 11:13:37.557912 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea\": container with ID starting with bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea not found: ID does not exist" containerID="bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.557949 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea"} err="failed to get container status \"bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea\": rpc error: code = NotFound desc = could not find container \"bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea\": container with ID starting with bcb43c7a0eba74a483f84d6e0484a87c3304e0016e51aa4bbe777c3ec8ad40ea not found: ID does not exist" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.557970 5014 scope.go:117] "RemoveContainer" containerID="2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef" Dec 05 11:13:37 crc kubenswrapper[5014]: E1205 11:13:37.558352 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef\": container with ID starting with 2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef not found: ID does not exist" containerID="2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.558381 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef"} err="failed to get container status \"2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef\": rpc error: code = NotFound desc = could not find container \"2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef\": container with ID starting with 2a0af6fa93b8a1b0a9ab7018745990287118dc1f702a0ef4342a3769cfc10aef not found: ID does not exist" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.558399 5014 scope.go:117] "RemoveContainer" 
containerID="57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce" Dec 05 11:13:37 crc kubenswrapper[5014]: E1205 11:13:37.559115 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce\": container with ID starting with 57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce not found: ID does not exist" containerID="57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce" Dec 05 11:13:37 crc kubenswrapper[5014]: I1205 11:13:37.559165 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce"} err="failed to get container status \"57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce\": rpc error: code = NotFound desc = could not find container \"57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce\": container with ID starting with 57e5b9fd2a1cfd678cff29f5fd6690dbabb1cc034a584921b7c1e27a26dca5ce not found: ID does not exist" Dec 05 11:13:39 crc kubenswrapper[5014]: I1205 11:13:39.330028 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="373da960-3448-4c98-ab58-a82fa3816b8c" path="/var/lib/kubelet/pods/373da960-3448-4c98-ab58-a82fa3816b8c/volumes" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.285879 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2ctzg"] Dec 05 11:13:44 crc kubenswrapper[5014]: E1205 11:13:44.286773 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="373da960-3448-4c98-ab58-a82fa3816b8c" containerName="extract-content" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.286789 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="373da960-3448-4c98-ab58-a82fa3816b8c" containerName="extract-content" Dec 05 11:13:44 crc kubenswrapper[5014]: E1205 11:13:44.286825 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="373da960-3448-4c98-ab58-a82fa3816b8c" containerName="extract-utilities" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.286834 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="373da960-3448-4c98-ab58-a82fa3816b8c" containerName="extract-utilities" Dec 05 11:13:44 crc kubenswrapper[5014]: E1205 11:13:44.286855 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="373da960-3448-4c98-ab58-a82fa3816b8c" containerName="registry-server" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.286865 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="373da960-3448-4c98-ab58-a82fa3816b8c" containerName="registry-server" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.287103 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="373da960-3448-4c98-ab58-a82fa3816b8c" containerName="registry-server" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.288865 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.298342 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ctzg"] Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.380114 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-catalog-content\") pod \"redhat-marketplace-2ctzg\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.380232 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9srk8\" (UniqueName: \"kubernetes.io/projected/26cbd004-0e43-4223-bb45-2851441909b2-kube-api-access-9srk8\") pod \"redhat-marketplace-2ctzg\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.380299 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-utilities\") pod \"redhat-marketplace-2ctzg\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.482887 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9srk8\" (UniqueName: \"kubernetes.io/projected/26cbd004-0e43-4223-bb45-2851441909b2-kube-api-access-9srk8\") pod \"redhat-marketplace-2ctzg\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.484177 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-utilities\") pod \"redhat-marketplace-2ctzg\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.484653 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-catalog-content\") pod \"redhat-marketplace-2ctzg\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.484746 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-utilities\") pod \"redhat-marketplace-2ctzg\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.485307 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-catalog-content\") pod \"redhat-marketplace-2ctzg\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.507202 5014 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9srk8\" (UniqueName: \"kubernetes.io/projected/26cbd004-0e43-4223-bb45-2851441909b2-kube-api-access-9srk8\") pod \"redhat-marketplace-2ctzg\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:44 crc kubenswrapper[5014]: I1205 11:13:44.608229 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:45 crc kubenswrapper[5014]: I1205 11:13:45.108177 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ctzg"] Dec 05 11:13:45 crc kubenswrapper[5014]: I1205 11:13:45.518075 5014 generic.go:334] "Generic (PLEG): container finished" podID="26cbd004-0e43-4223-bb45-2851441909b2" containerID="1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674" exitCode=0 Dec 05 11:13:45 crc kubenswrapper[5014]: I1205 11:13:45.518179 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ctzg" event={"ID":"26cbd004-0e43-4223-bb45-2851441909b2","Type":"ContainerDied","Data":"1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674"} Dec 05 11:13:45 crc kubenswrapper[5014]: I1205 11:13:45.518470 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ctzg" event={"ID":"26cbd004-0e43-4223-bb45-2851441909b2","Type":"ContainerStarted","Data":"03c734801fe7df609fc7756faefdb8f0963b9453f523c504dab624d862b089cd"} Dec 05 11:13:47 crc kubenswrapper[5014]: I1205 11:13:47.538154 5014 generic.go:334] "Generic (PLEG): container finished" podID="26cbd004-0e43-4223-bb45-2851441909b2" containerID="6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587" exitCode=0 Dec 05 11:13:47 crc kubenswrapper[5014]: I1205 11:13:47.538334 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ctzg" event={"ID":"26cbd004-0e43-4223-bb45-2851441909b2","Type":"ContainerDied","Data":"6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587"} Dec 05 11:13:48 crc kubenswrapper[5014]: I1205 11:13:48.548789 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ctzg" event={"ID":"26cbd004-0e43-4223-bb45-2851441909b2","Type":"ContainerStarted","Data":"27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95"} Dec 05 11:13:48 crc kubenswrapper[5014]: I1205 11:13:48.577041 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2ctzg" podStartSLOduration=2.151342399 podStartE2EDuration="4.577022375s" podCreationTimestamp="2025-12-05 11:13:44 +0000 UTC" firstStartedPulling="2025-12-05 11:13:45.519489076 +0000 UTC m=+1552.467606780" lastFinishedPulling="2025-12-05 11:13:47.945169042 +0000 UTC m=+1554.893286756" observedRunningTime="2025-12-05 11:13:48.571288255 +0000 UTC m=+1555.519405969" watchObservedRunningTime="2025-12-05 11:13:48.577022375 +0000 UTC m=+1555.525140079" Dec 05 11:13:54 crc kubenswrapper[5014]: I1205 11:13:54.609437 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:54 crc kubenswrapper[5014]: I1205 11:13:54.609955 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:54 crc kubenswrapper[5014]: I1205 11:13:54.659599 5014 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:55 crc kubenswrapper[5014]: I1205 11:13:55.664813 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:55 crc kubenswrapper[5014]: I1205 11:13:55.714531 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ctzg"] Dec 05 11:13:57 crc kubenswrapper[5014]: I1205 11:13:57.632955 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2ctzg" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="registry-server" containerID="cri-o://27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95" gracePeriod=2 Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.167177 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.267948 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9srk8\" (UniqueName: \"kubernetes.io/projected/26cbd004-0e43-4223-bb45-2851441909b2-kube-api-access-9srk8\") pod \"26cbd004-0e43-4223-bb45-2851441909b2\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.268156 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-utilities\") pod \"26cbd004-0e43-4223-bb45-2851441909b2\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.268354 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-catalog-content\") pod \"26cbd004-0e43-4223-bb45-2851441909b2\" (UID: \"26cbd004-0e43-4223-bb45-2851441909b2\") " Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.268954 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-utilities" (OuterVolumeSpecName: "utilities") pod "26cbd004-0e43-4223-bb45-2851441909b2" (UID: "26cbd004-0e43-4223-bb45-2851441909b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.279004 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26cbd004-0e43-4223-bb45-2851441909b2-kube-api-access-9srk8" (OuterVolumeSpecName: "kube-api-access-9srk8") pod "26cbd004-0e43-4223-bb45-2851441909b2" (UID: "26cbd004-0e43-4223-bb45-2851441909b2"). InnerVolumeSpecName "kube-api-access-9srk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.287470 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "26cbd004-0e43-4223-bb45-2851441909b2" (UID: "26cbd004-0e43-4223-bb45-2851441909b2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.372741 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.372789 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9srk8\" (UniqueName: \"kubernetes.io/projected/26cbd004-0e43-4223-bb45-2851441909b2-kube-api-access-9srk8\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.372804 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26cbd004-0e43-4223-bb45-2851441909b2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.646093 5014 generic.go:334] "Generic (PLEG): container finished" podID="26cbd004-0e43-4223-bb45-2851441909b2" containerID="27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95" exitCode=0 Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.646165 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ctzg" event={"ID":"26cbd004-0e43-4223-bb45-2851441909b2","Type":"ContainerDied","Data":"27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95"} Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.646233 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ctzg" event={"ID":"26cbd004-0e43-4223-bb45-2851441909b2","Type":"ContainerDied","Data":"03c734801fe7df609fc7756faefdb8f0963b9453f523c504dab624d862b089cd"} Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.646231 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ctzg" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.646254 5014 scope.go:117] "RemoveContainer" containerID="27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.664472 5014 scope.go:117] "RemoveContainer" containerID="6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.686213 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ctzg"] Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.694594 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ctzg"] Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.707953 5014 scope.go:117] "RemoveContainer" containerID="1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.739974 5014 scope.go:117] "RemoveContainer" containerID="27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95" Dec 05 11:13:58 crc kubenswrapper[5014]: E1205 11:13:58.740513 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95\": container with ID starting with 27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95 not found: ID does not exist" containerID="27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.740576 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95"} err="failed to get container status \"27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95\": rpc error: code = NotFound desc = could not find container \"27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95\": container with ID starting with 27b4bd0c4e1549cb16ecfd3d92635b67f1a036b592b599e4a8f9e26af66f5e95 not found: ID does not exist" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.740612 5014 scope.go:117] "RemoveContainer" containerID="6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587" Dec 05 11:13:58 crc kubenswrapper[5014]: E1205 11:13:58.740943 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587\": container with ID starting with 6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587 not found: ID does not exist" containerID="6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.740984 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587"} err="failed to get container status \"6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587\": rpc error: code = NotFound desc = could not find container \"6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587\": container with ID starting with 6526eed6478b7d078c92a65871be50655f1bc2ed831a528e7725acc6a80c4587 not found: ID does not exist" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.741011 5014 scope.go:117] "RemoveContainer" 
containerID="1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674" Dec 05 11:13:58 crc kubenswrapper[5014]: E1205 11:13:58.741242 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674\": container with ID starting with 1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674 not found: ID does not exist" containerID="1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674" Dec 05 11:13:58 crc kubenswrapper[5014]: I1205 11:13:58.741266 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674"} err="failed to get container status \"1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674\": rpc error: code = NotFound desc = could not find container \"1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674\": container with ID starting with 1c389fce29d0f0cf99d5f54573d0825ce9e48d126dec812bcd8ab49208b7b674 not found: ID does not exist" Dec 05 11:13:59 crc kubenswrapper[5014]: I1205 11:13:59.329971 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26cbd004-0e43-4223-bb45-2851441909b2" path="/var/lib/kubelet/pods/26cbd004-0e43-4223-bb45-2851441909b2/volumes" Dec 05 11:14:03 crc kubenswrapper[5014]: I1205 11:14:03.569762 5014 scope.go:117] "RemoveContainer" containerID="ca45bc629a54bb9822178789a6eaa7dbd592e9a1e24ba9bc6b7292973ca89575" Dec 05 11:14:03 crc kubenswrapper[5014]: I1205 11:14:03.595049 5014 scope.go:117] "RemoveContainer" containerID="75ce8647ee9947930fccb4bfe2c1284131fbb30df25d1af67604e546a21e60fb" Dec 05 11:14:03 crc kubenswrapper[5014]: I1205 11:14:03.643400 5014 scope.go:117] "RemoveContainer" containerID="4a31852627b25d3b366e0362681bacb7705b8cfb0d21ebdbb2a2618984bbb033" Dec 05 11:14:03 crc kubenswrapper[5014]: I1205 11:14:03.679398 5014 scope.go:117] "RemoveContainer" containerID="0f76d4a9503fcc7f1d5cdab4a8c2506f01d2045033378f935502e9b9823ad0cf" Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.074327 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mqbp7"] Dec 05 11:14:31 crc kubenswrapper[5014]: E1205 11:14:31.075337 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="extract-content" Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.075351 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="extract-content" Dec 05 11:14:31 crc kubenswrapper[5014]: E1205 11:14:31.075380 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="extract-utilities" Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.075387 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="extract-utilities" Dec 05 11:14:31 crc kubenswrapper[5014]: E1205 11:14:31.075399 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="registry-server" Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.075405 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="registry-server" Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.075581 5014 
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.074327 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mqbp7"]
Dec 05 11:14:31 crc kubenswrapper[5014]: E1205 11:14:31.075337 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="extract-content"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.075351 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="extract-content"
Dec 05 11:14:31 crc kubenswrapper[5014]: E1205 11:14:31.075380 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="extract-utilities"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.075387 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="extract-utilities"
Dec 05 11:14:31 crc kubenswrapper[5014]: E1205 11:14:31.075399 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="registry-server"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.075405 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="registry-server"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.075581 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="26cbd004-0e43-4223-bb45-2851441909b2" containerName="registry-server"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.081640 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.095710 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mqbp7"]
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.215791 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4vp2\" (UniqueName: \"kubernetes.io/projected/346e84e1-2fdd-4cf0-af77-da463b4fde0f-kube-api-access-c4vp2\") pod \"certified-operators-mqbp7\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") " pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.215935 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-utilities\") pod \"certified-operators-mqbp7\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") " pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.216138 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-catalog-content\") pod \"certified-operators-mqbp7\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") " pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.317887 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4vp2\" (UniqueName: \"kubernetes.io/projected/346e84e1-2fdd-4cf0-af77-da463b4fde0f-kube-api-access-c4vp2\") pod \"certified-operators-mqbp7\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") " pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.318034 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-utilities\") pod \"certified-operators-mqbp7\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") " pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.318104 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-catalog-content\") pod \"certified-operators-mqbp7\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") " pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.318694 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-utilities\") pod \"certified-operators-mqbp7\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") " pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.318786 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-catalog-content\") pod \"certified-operators-mqbp7\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") " pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.342187 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4vp2\" (UniqueName: \"kubernetes.io/projected/346e84e1-2fdd-4cf0-af77-da463b4fde0f-kube-api-access-c4vp2\") pod \"certified-operators-mqbp7\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") " pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:31 crc kubenswrapper[5014]: I1205 11:14:31.408019 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:32 crc kubenswrapper[5014]: I1205 11:14:32.464990 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mqbp7"]
Dec 05 11:14:32 crc kubenswrapper[5014]: I1205 11:14:32.955391 5014 generic.go:334] "Generic (PLEG): container finished" podID="346e84e1-2fdd-4cf0-af77-da463b4fde0f" containerID="efcc041bb071b918b2b1bbd79e9e9599b002268d7a63d6108f77865f39ade039" exitCode=0
Dec 05 11:14:32 crc kubenswrapper[5014]: I1205 11:14:32.955512 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqbp7" event={"ID":"346e84e1-2fdd-4cf0-af77-da463b4fde0f","Type":"ContainerDied","Data":"efcc041bb071b918b2b1bbd79e9e9599b002268d7a63d6108f77865f39ade039"}
Dec 05 11:14:32 crc kubenswrapper[5014]: I1205 11:14:32.955750 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqbp7" event={"ID":"346e84e1-2fdd-4cf0-af77-da463b4fde0f","Type":"ContainerStarted","Data":"fe6202be6ecb29f95c045b74ba62f92bfa94d8d7e5ad21ae032ae31e610db20c"}
Dec 05 11:14:33 crc kubenswrapper[5014]: I1205 11:14:33.967055 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqbp7" event={"ID":"346e84e1-2fdd-4cf0-af77-da463b4fde0f","Type":"ContainerStarted","Data":"794f170170be8689a0b15fb55aa557b04d87ed00ecd7d4d30d257a2ca709d958"}
Dec 05 11:14:34 crc kubenswrapper[5014]: I1205 11:14:34.977805 5014 generic.go:334] "Generic (PLEG): container finished" podID="346e84e1-2fdd-4cf0-af77-da463b4fde0f" containerID="794f170170be8689a0b15fb55aa557b04d87ed00ecd7d4d30d257a2ca709d958" exitCode=0
Dec 05 11:14:34 crc kubenswrapper[5014]: I1205 11:14:34.977867 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqbp7" event={"ID":"346e84e1-2fdd-4cf0-af77-da463b4fde0f","Type":"ContainerDied","Data":"794f170170be8689a0b15fb55aa557b04d87ed00ecd7d4d30d257a2ca709d958"}
Dec 05 11:14:35 crc kubenswrapper[5014]: I1205 11:14:35.988090 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqbp7" event={"ID":"346e84e1-2fdd-4cf0-af77-da463b4fde0f","Type":"ContainerStarted","Data":"939a69729a1b85e3f547155e41544957618a6331c4fa83dfe579058cd15d3dae"}
Dec 05 11:14:36 crc kubenswrapper[5014]: I1205 11:14:36.014690 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mqbp7" podStartSLOduration=2.574391719 podStartE2EDuration="5.014669473s" podCreationTimestamp="2025-12-05 11:14:31 +0000 UTC" firstStartedPulling="2025-12-05 11:14:32.95780575 +0000 UTC m=+1599.905923454" lastFinishedPulling="2025-12-05 11:14:35.398083514 +0000 UTC m=+1602.346201208" observedRunningTime="2025-12-05 11:14:36.003900771 +0000 UTC m=+1602.952018485" watchObservedRunningTime="2025-12-05 11:14:36.014669473 +0000 UTC m=+1602.962787177"
Dec 05 11:14:41 crc kubenswrapper[5014]: I1205 11:14:41.409217 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:41 crc kubenswrapper[5014]: I1205 11:14:41.409785 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:41 crc kubenswrapper[5014]: I1205 11:14:41.459977 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:42 crc kubenswrapper[5014]: I1205 11:14:42.110549 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:42 crc kubenswrapper[5014]: I1205 11:14:42.165547 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mqbp7"]
Dec 05 11:14:44 crc kubenswrapper[5014]: I1205 11:14:44.076492 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mqbp7" podUID="346e84e1-2fdd-4cf0-af77-da463b4fde0f" containerName="registry-server" containerID="cri-o://939a69729a1b85e3f547155e41544957618a6331c4fa83dfe579058cd15d3dae" gracePeriod=2
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.099375 5014 generic.go:334] "Generic (PLEG): container finished" podID="346e84e1-2fdd-4cf0-af77-da463b4fde0f" containerID="939a69729a1b85e3f547155e41544957618a6331c4fa83dfe579058cd15d3dae" exitCode=0
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.100337 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqbp7" event={"ID":"346e84e1-2fdd-4cf0-af77-da463b4fde0f","Type":"ContainerDied","Data":"939a69729a1b85e3f547155e41544957618a6331c4fa83dfe579058cd15d3dae"}
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.363226 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.539173 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-catalog-content\") pod \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") "
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.539327 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4vp2\" (UniqueName: \"kubernetes.io/projected/346e84e1-2fdd-4cf0-af77-da463b4fde0f-kube-api-access-c4vp2\") pod \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") "
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.539364 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-utilities\") pod \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\" (UID: \"346e84e1-2fdd-4cf0-af77-da463b4fde0f\") "
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.540358 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-utilities" (OuterVolumeSpecName: "utilities") pod "346e84e1-2fdd-4cf0-af77-da463b4fde0f" (UID: "346e84e1-2fdd-4cf0-af77-da463b4fde0f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.548922 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/346e84e1-2fdd-4cf0-af77-da463b4fde0f-kube-api-access-c4vp2" (OuterVolumeSpecName: "kube-api-access-c4vp2") pod "346e84e1-2fdd-4cf0-af77-da463b4fde0f" (UID: "346e84e1-2fdd-4cf0-af77-da463b4fde0f"). InnerVolumeSpecName "kube-api-access-c4vp2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.595568 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "346e84e1-2fdd-4cf0-af77-da463b4fde0f" (UID: "346e84e1-2fdd-4cf0-af77-da463b4fde0f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.642982 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.643036 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4vp2\" (UniqueName: \"kubernetes.io/projected/346e84e1-2fdd-4cf0-af77-da463b4fde0f-kube-api-access-c4vp2\") on node \"crc\" DevicePath \"\""
Dec 05 11:14:46 crc kubenswrapper[5014]: I1205 11:14:46.643067 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346e84e1-2fdd-4cf0-af77-da463b4fde0f-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 11:14:47 crc kubenswrapper[5014]: I1205 11:14:47.115313 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqbp7" event={"ID":"346e84e1-2fdd-4cf0-af77-da463b4fde0f","Type":"ContainerDied","Data":"fe6202be6ecb29f95c045b74ba62f92bfa94d8d7e5ad21ae032ae31e610db20c"}
Dec 05 11:14:47 crc kubenswrapper[5014]: I1205 11:14:47.115661 5014 scope.go:117] "RemoveContainer" containerID="939a69729a1b85e3f547155e41544957618a6331c4fa83dfe579058cd15d3dae"
Dec 05 11:14:47 crc kubenswrapper[5014]: I1205 11:14:47.115423 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqbp7"
Dec 05 11:14:47 crc kubenswrapper[5014]: I1205 11:14:47.145875 5014 scope.go:117] "RemoveContainer" containerID="794f170170be8689a0b15fb55aa557b04d87ed00ecd7d4d30d257a2ca709d958"
Dec 05 11:14:47 crc kubenswrapper[5014]: I1205 11:14:47.149110 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mqbp7"]
Dec 05 11:14:47 crc kubenswrapper[5014]: I1205 11:14:47.158083 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mqbp7"]
Dec 05 11:14:47 crc kubenswrapper[5014]: I1205 11:14:47.181783 5014 scope.go:117] "RemoveContainer" containerID="efcc041bb071b918b2b1bbd79e9e9599b002268d7a63d6108f77865f39ade039"
Dec 05 11:14:47 crc kubenswrapper[5014]: I1205 11:14:47.329306 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="346e84e1-2fdd-4cf0-af77-da463b4fde0f" path="/var/lib/kubelet/pods/346e84e1-2fdd-4cf0-af77-da463b4fde0f/volumes"
11:15:00.154177 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="346e84e1-2fdd-4cf0-af77-da463b4fde0f" containerName="registry-server" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.154184 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="346e84e1-2fdd-4cf0-af77-da463b4fde0f" containerName="registry-server" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.154441 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="346e84e1-2fdd-4cf0-af77-da463b4fde0f" containerName="registry-server" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.155084 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.162727 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.162736 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.166123 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw"] Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.307832 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-config-volume\") pod \"collect-profiles-29415555-sx2lw\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.307955 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fsv9\" (UniqueName: \"kubernetes.io/projected/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-kube-api-access-2fsv9\") pod \"collect-profiles-29415555-sx2lw\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.307987 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-secret-volume\") pod \"collect-profiles-29415555-sx2lw\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.409952 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-config-volume\") pod \"collect-profiles-29415555-sx2lw\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.410049 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fsv9\" (UniqueName: \"kubernetes.io/projected/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-kube-api-access-2fsv9\") pod \"collect-profiles-29415555-sx2lw\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 
05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.410070 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-secret-volume\") pod \"collect-profiles-29415555-sx2lw\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.411298 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-config-volume\") pod \"collect-profiles-29415555-sx2lw\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.416827 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-secret-volume\") pod \"collect-profiles-29415555-sx2lw\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.428874 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fsv9\" (UniqueName: \"kubernetes.io/projected/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-kube-api-access-2fsv9\") pod \"collect-profiles-29415555-sx2lw\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.497314 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:00 crc kubenswrapper[5014]: I1205 11:15:00.970660 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw"] Dec 05 11:15:01 crc kubenswrapper[5014]: I1205 11:15:01.231878 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" event={"ID":"a0ad9bdc-7eeb-451e-a9ed-cb7421206906","Type":"ContainerStarted","Data":"47dc1f1a90e9825137b7d99f8474b8ff1492d25eabad65fec236fc4ad9ab4959"} Dec 05 11:15:01 crc kubenswrapper[5014]: I1205 11:15:01.232221 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" event={"ID":"a0ad9bdc-7eeb-451e-a9ed-cb7421206906","Type":"ContainerStarted","Data":"ca48ebd4a61fc2fd6a4cf13b920496d04678f29f8b8b2dc94cc93649ba75fd0d"} Dec 05 11:15:01 crc kubenswrapper[5014]: I1205 11:15:01.255412 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" podStartSLOduration=1.255391586 podStartE2EDuration="1.255391586s" podCreationTimestamp="2025-12-05 11:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 11:15:01.248928148 +0000 UTC m=+1628.197045862" watchObservedRunningTime="2025-12-05 11:15:01.255391586 +0000 UTC m=+1628.203509290" Dec 05 11:15:02 crc kubenswrapper[5014]: I1205 11:15:02.936787 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:15:02 crc kubenswrapper[5014]: I1205 11:15:02.937104 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:15:03 crc kubenswrapper[5014]: I1205 11:15:03.253421 5014 generic.go:334] "Generic (PLEG): container finished" podID="a0ad9bdc-7eeb-451e-a9ed-cb7421206906" containerID="47dc1f1a90e9825137b7d99f8474b8ff1492d25eabad65fec236fc4ad9ab4959" exitCode=0 Dec 05 11:15:03 crc kubenswrapper[5014]: I1205 11:15:03.253478 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" event={"ID":"a0ad9bdc-7eeb-451e-a9ed-cb7421206906","Type":"ContainerDied","Data":"47dc1f1a90e9825137b7d99f8474b8ff1492d25eabad65fec236fc4ad9ab4959"} Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.615519 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.700042 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fsv9\" (UniqueName: \"kubernetes.io/projected/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-kube-api-access-2fsv9\") pod \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.707610 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-kube-api-access-2fsv9" (OuterVolumeSpecName: "kube-api-access-2fsv9") pod "a0ad9bdc-7eeb-451e-a9ed-cb7421206906" (UID: "a0ad9bdc-7eeb-451e-a9ed-cb7421206906"). InnerVolumeSpecName "kube-api-access-2fsv9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.801170 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-secret-volume\") pod \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.801540 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-config-volume\") pod \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\" (UID: \"a0ad9bdc-7eeb-451e-a9ed-cb7421206906\") " Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.801914 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fsv9\" (UniqueName: \"kubernetes.io/projected/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-kube-api-access-2fsv9\") on node \"crc\" DevicePath \"\"" Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.802879 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-config-volume" (OuterVolumeSpecName: "config-volume") pod "a0ad9bdc-7eeb-451e-a9ed-cb7421206906" (UID: "a0ad9bdc-7eeb-451e-a9ed-cb7421206906"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.805161 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a0ad9bdc-7eeb-451e-a9ed-cb7421206906" (UID: "a0ad9bdc-7eeb-451e-a9ed-cb7421206906"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.903122 5014 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:15:04 crc kubenswrapper[5014]: I1205 11:15:04.903153 5014 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0ad9bdc-7eeb-451e-a9ed-cb7421206906-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:15:05 crc kubenswrapper[5014]: I1205 11:15:05.271419 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" event={"ID":"a0ad9bdc-7eeb-451e-a9ed-cb7421206906","Type":"ContainerDied","Data":"ca48ebd4a61fc2fd6a4cf13b920496d04678f29f8b8b2dc94cc93649ba75fd0d"} Dec 05 11:15:05 crc kubenswrapper[5014]: I1205 11:15:05.271464 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca48ebd4a61fc2fd6a4cf13b920496d04678f29f8b8b2dc94cc93649ba75fd0d" Dec 05 11:15:05 crc kubenswrapper[5014]: I1205 11:15:05.271473 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw" Dec 05 11:15:32 crc kubenswrapper[5014]: I1205 11:15:32.936507 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:15:32 crc kubenswrapper[5014]: I1205 11:15:32.938215 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:16:02 crc kubenswrapper[5014]: I1205 11:16:02.936784 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:16:02 crc kubenswrapper[5014]: I1205 11:16:02.938546 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:16:02 crc kubenswrapper[5014]: I1205 11:16:02.938678 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 11:16:02 crc kubenswrapper[5014]: I1205 11:16:02.939654 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:16:02 crc kubenswrapper[5014]: I1205 11:16:02.939801 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" gracePeriod=600 Dec 05 11:16:03 crc kubenswrapper[5014]: E1205 11:16:03.073066 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:16:03 crc kubenswrapper[5014]: I1205 11:16:03.795851 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" exitCode=0 Dec 05 11:16:03 crc kubenswrapper[5014]: I1205 11:16:03.795911 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"} Dec 05 11:16:03 crc kubenswrapper[5014]: I1205 11:16:03.795974 5014 scope.go:117] "RemoveContainer" containerID="0d6d61cc21a88a778a6896c0ce3a742c000804cdd014c81b67f82fc215c25138" Dec 05 11:16:03 crc kubenswrapper[5014]: I1205 11:16:03.796823 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:16:03 crc kubenswrapper[5014]: E1205 11:16:03.797209 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:16:17 crc kubenswrapper[5014]: I1205 11:16:17.319059 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:16:17 crc kubenswrapper[5014]: E1205 11:16:17.320033 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:16:32 crc kubenswrapper[5014]: I1205 11:16:32.318523 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:16:32 crc kubenswrapper[5014]: E1205 11:16:32.319403 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:16:46 crc kubenswrapper[5014]: I1205 11:16:46.318935 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:16:46 crc kubenswrapper[5014]: E1205 11:16:46.320410 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:16:58 crc kubenswrapper[5014]: I1205 11:16:58.318074 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:16:58 crc kubenswrapper[5014]: E1205 11:16:58.318950 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:17:03 crc kubenswrapper[5014]: I1205 11:17:03.837695 5014 scope.go:117] "RemoveContainer" containerID="a270aa9e58e6d1e81130adf28324cb68cdf67724cf4377ac0f05b5314e1185c3" Dec 05 11:17:10 crc kubenswrapper[5014]: I1205 11:17:10.319173 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:17:10 crc kubenswrapper[5014]: E1205 11:17:10.320012 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:17:23 crc kubenswrapper[5014]: I1205 11:17:23.325657 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:17:23 crc kubenswrapper[5014]: E1205 11:17:23.326323 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.050215 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-nj5xc"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.071431 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-58ab-account-create-update-m6mpt"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.083782 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-7cba-account-create-update-5s99p"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.093578 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-853a-account-create-update-dk6wb"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.108174 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-nj5xc"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.119642 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-lfvsl"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.131603 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-zmtmw"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.143176 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-7cba-account-create-update-5s99p"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.153948 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-853a-account-create-update-dk6wb"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.163907 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-lfvsl"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.173726 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/keystone-58ab-account-create-update-m6mpt"] Dec 05 11:17:24 crc kubenswrapper[5014]: I1205 11:17:24.183436 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-zmtmw"] Dec 05 11:17:25 crc kubenswrapper[5014]: I1205 11:17:25.330565 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="404eef16-b758-4079-bc0e-cf8c9a17ff11" path="/var/lib/kubelet/pods/404eef16-b758-4079-bc0e-cf8c9a17ff11/volumes" Dec 05 11:17:25 crc kubenswrapper[5014]: I1205 11:17:25.331592 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="727cdfb6-ffb7-4e5f-9aed-b856d87bb80e" path="/var/lib/kubelet/pods/727cdfb6-ffb7-4e5f-9aed-b856d87bb80e/volumes" Dec 05 11:17:25 crc kubenswrapper[5014]: I1205 11:17:25.332175 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="908fbcc8-261b-40a5-9f70-b041c908b47e" path="/var/lib/kubelet/pods/908fbcc8-261b-40a5-9f70-b041c908b47e/volumes" Dec 05 11:17:25 crc kubenswrapper[5014]: I1205 11:17:25.332731 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d05249d-225e-46be-9441-572d372fd7ab" path="/var/lib/kubelet/pods/9d05249d-225e-46be-9441-572d372fd7ab/volumes" Dec 05 11:17:25 crc kubenswrapper[5014]: I1205 11:17:25.333871 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bef9ec2f-df6e-4409-8e88-8ae5895faa24" path="/var/lib/kubelet/pods/bef9ec2f-df6e-4409-8e88-8ae5895faa24/volumes" Dec 05 11:17:25 crc kubenswrapper[5014]: I1205 11:17:25.334493 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbef5fef-ac9d-4c62-af8e-9956ce64a080" path="/var/lib/kubelet/pods/dbef5fef-ac9d-4c62-af8e-9956ce64a080/volumes" Dec 05 11:17:31 crc kubenswrapper[5014]: I1205 11:17:31.582108 5014 generic.go:334] "Generic (PLEG): container finished" podID="4fc14e6b-fae1-4d4c-96f8-f5a86422a20a" containerID="71b298f90920e2f487cb677723a064edbbda8f2f274d7e5e0b3c4a0597daee26" exitCode=0 Dec 05 11:17:31 crc kubenswrapper[5014]: I1205 11:17:31.582361 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" event={"ID":"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a","Type":"ContainerDied","Data":"71b298f90920e2f487cb677723a064edbbda8f2f274d7e5e0b3c4a0597daee26"} Dec 05 11:17:32 crc kubenswrapper[5014]: I1205 11:17:32.998072 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.071694 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-ssh-key\") pod \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.072088 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-inventory\") pod \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.072354 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqljb\" (UniqueName: \"kubernetes.io/projected/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-kube-api-access-nqljb\") pod \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.072516 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-bootstrap-combined-ca-bundle\") pod \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\" (UID: \"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a\") " Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.078523 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "4fc14e6b-fae1-4d4c-96f8-f5a86422a20a" (UID: "4fc14e6b-fae1-4d4c-96f8-f5a86422a20a"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.078678 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-kube-api-access-nqljb" (OuterVolumeSpecName: "kube-api-access-nqljb") pod "4fc14e6b-fae1-4d4c-96f8-f5a86422a20a" (UID: "4fc14e6b-fae1-4d4c-96f8-f5a86422a20a"). InnerVolumeSpecName "kube-api-access-nqljb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.101766 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4fc14e6b-fae1-4d4c-96f8-f5a86422a20a" (UID: "4fc14e6b-fae1-4d4c-96f8-f5a86422a20a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.112745 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-inventory" (OuterVolumeSpecName: "inventory") pod "4fc14e6b-fae1-4d4c-96f8-f5a86422a20a" (UID: "4fc14e6b-fae1-4d4c-96f8-f5a86422a20a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.175862 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.175896 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.175906 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqljb\" (UniqueName: \"kubernetes.io/projected/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-kube-api-access-nqljb\") on node \"crc\" DevicePath \"\"" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.175923 5014 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc14e6b-fae1-4d4c-96f8-f5a86422a20a-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.604556 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" event={"ID":"4fc14e6b-fae1-4d4c-96f8-f5a86422a20a","Type":"ContainerDied","Data":"2f1a15ea8bb8f638a2e5cd0bdbad0a56480a6f98b3256076b393bf8b8447c6bb"} Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.604595 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f1a15ea8bb8f638a2e5cd0bdbad0a56480a6f98b3256076b393bf8b8447c6bb" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.604615 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.698264 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6"] Dec 05 11:17:33 crc kubenswrapper[5014]: E1205 11:17:33.698744 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0ad9bdc-7eeb-451e-a9ed-cb7421206906" containerName="collect-profiles" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.698768 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0ad9bdc-7eeb-451e-a9ed-cb7421206906" containerName="collect-profiles" Dec 05 11:17:33 crc kubenswrapper[5014]: E1205 11:17:33.698796 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fc14e6b-fae1-4d4c-96f8-f5a86422a20a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.698805 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fc14e6b-fae1-4d4c-96f8-f5a86422a20a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.698999 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0ad9bdc-7eeb-451e-a9ed-cb7421206906" containerName="collect-profiles" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.699019 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fc14e6b-fae1-4d4c-96f8-f5a86422a20a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.699740 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.702213 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.702409 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.702505 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.702842 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.721438 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6"] Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.787231 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.787532 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtv64\" (UniqueName: \"kubernetes.io/projected/82e883e4-b7b9-463c-99e5-ac0a855a22cd-kube-api-access-rtv64\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.787579 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.889858 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtv64\" (UniqueName: \"kubernetes.io/projected/82e883e4-b7b9-463c-99e5-ac0a855a22cd-kube-api-access-rtv64\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.889954 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.890051 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.896928 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.898164 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:33 crc kubenswrapper[5014]: I1205 11:17:33.909120 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtv64\" (UniqueName: \"kubernetes.io/projected/82e883e4-b7b9-463c-99e5-ac0a855a22cd-kube-api-access-rtv64\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:34 crc kubenswrapper[5014]: I1205 11:17:34.015977 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" Dec 05 11:17:34 crc kubenswrapper[5014]: I1205 11:17:34.553697 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6"] Dec 05 11:17:34 crc kubenswrapper[5014]: I1205 11:17:34.560862 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:17:34 crc kubenswrapper[5014]: I1205 11:17:34.614054 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" event={"ID":"82e883e4-b7b9-463c-99e5-ac0a855a22cd","Type":"ContainerStarted","Data":"afbc42bf58d7148739f666748a2bf3322d016cc25c663e325a596adaf31f2a78"} Dec 05 11:17:35 crc kubenswrapper[5014]: I1205 11:17:35.624796 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" event={"ID":"82e883e4-b7b9-463c-99e5-ac0a855a22cd","Type":"ContainerStarted","Data":"d920aeab89d156797cae64835c3e11debd48288110579eb98838b11d5f57a378"} Dec 05 11:17:35 crc kubenswrapper[5014]: I1205 11:17:35.649178 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" podStartSLOduration=2.2393158890000002 podStartE2EDuration="2.649155734s" podCreationTimestamp="2025-12-05 11:17:33 +0000 UTC" firstStartedPulling="2025-12-05 11:17:34.560668206 +0000 UTC m=+1781.508785910" lastFinishedPulling="2025-12-05 11:17:34.970508051 +0000 UTC m=+1781.918625755" observedRunningTime="2025-12-05 11:17:35.644338936 +0000 UTC m=+1782.592456640" watchObservedRunningTime="2025-12-05 11:17:35.649155734 +0000 UTC m=+1782.597273438" Dec 05 11:17:36 crc kubenswrapper[5014]: I1205 11:17:36.318782 5014 scope.go:117] "RemoveContainer" 
containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:17:36 crc kubenswrapper[5014]: E1205 11:17:36.319359 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:17:48 crc kubenswrapper[5014]: I1205 11:17:48.318773 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:17:48 crc kubenswrapper[5014]: E1205 11:17:48.319580 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.067198 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-26ee-account-create-update-mnkx4"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.081218 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ff76-account-create-update-mnw77"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.094524 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-qb5xt"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.102137 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-914c-account-create-update-7m424"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.109599 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-26ee-account-create-update-mnkx4"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.117305 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-dz8p5"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.124787 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-nwrsm"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.132857 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-ff76-account-create-update-mnw77"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.140599 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-914c-account-create-update-7m424"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.147983 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-qb5xt"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.155314 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-nwrsm"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.162537 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-dz8p5"] Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.330048 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17393775-7e86-44ba-8fcc-d502ed251de0" path="/var/lib/kubelet/pods/17393775-7e86-44ba-8fcc-d502ed251de0/volumes" Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 
11:17:53.330924 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b3c2731-7987-4449-bc49-71d8f679b8b6" path="/var/lib/kubelet/pods/5b3c2731-7987-4449-bc49-71d8f679b8b6/volumes" Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.331723 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c290d7c-34a6-40f1-838d-379d5cee5319" path="/var/lib/kubelet/pods/5c290d7c-34a6-40f1-838d-379d5cee5319/volumes" Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.332553 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64e940f0-5a75-41aa-86e5-8da00fd4fe1b" path="/var/lib/kubelet/pods/64e940f0-5a75-41aa-86e5-8da00fd4fe1b/volumes" Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.334009 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71c84bd0-ef34-4a59-9542-4abfbf3ffb40" path="/var/lib/kubelet/pods/71c84bd0-ef34-4a59-9542-4abfbf3ffb40/volumes" Dec 05 11:17:53 crc kubenswrapper[5014]: I1205 11:17:53.334811 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a613db58-0a2a-4960-a361-b02a32ed6713" path="/var/lib/kubelet/pods/a613db58-0a2a-4960-a361-b02a32ed6713/volumes" Dec 05 11:17:55 crc kubenswrapper[5014]: I1205 11:17:55.032488 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-8hlb6"] Dec 05 11:17:55 crc kubenswrapper[5014]: I1205 11:17:55.040092 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-8hlb6"] Dec 05 11:17:55 crc kubenswrapper[5014]: I1205 11:17:55.331963 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a59ccde-127a-4709-8ea1-efd59b48504f" path="/var/lib/kubelet/pods/9a59ccde-127a-4709-8ea1-efd59b48504f/volumes" Dec 05 11:17:57 crc kubenswrapper[5014]: I1205 11:17:57.036723 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-9jkh2"] Dec 05 11:17:57 crc kubenswrapper[5014]: I1205 11:17:57.045768 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-9jkh2"] Dec 05 11:17:57 crc kubenswrapper[5014]: I1205 11:17:57.328840 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d02ac887-7c2b-4eea-bf6f-795359aa8b14" path="/var/lib/kubelet/pods/d02ac887-7c2b-4eea-bf6f-795359aa8b14/volumes" Dec 05 11:18:03 crc kubenswrapper[5014]: I1205 11:18:03.328530 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:18:03 crc kubenswrapper[5014]: E1205 11:18:03.329525 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:18:03 crc kubenswrapper[5014]: I1205 11:18:03.910309 5014 scope.go:117] "RemoveContainer" containerID="345a6703ed9d14e9283ac4bae1e8267ef6e05ea14c6d6dde9a55d87ddff8f875" Dec 05 11:18:03 crc kubenswrapper[5014]: I1205 11:18:03.967399 5014 scope.go:117] "RemoveContainer" containerID="22c6d0c3a24d20e99969a2e8c1ef5b363a9c9588a33efdd405df8faf75eb6e1c" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.001172 5014 scope.go:117] "RemoveContainer" containerID="0543cdb8eda5b3fb598c4009f0a990a403780ebe9097d42fad28008089ceedfa" Dec 05 
11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.054844 5014 scope.go:117] "RemoveContainer" containerID="b09ba23a8f3d79286c29fab2eb193acc70aafc4f6974ea83bfcf6c0554fd2bd4" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.087127 5014 scope.go:117] "RemoveContainer" containerID="f8dbfa425be9978604e0fd4ec68f93736423a7f9293fba8e89a8d2aecfd9b3f7" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.128321 5014 scope.go:117] "RemoveContainer" containerID="277f825b0886af74e153dd6df8fe783d0e0b39885a7de3f0842d8caa6db93f9a" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.180336 5014 scope.go:117] "RemoveContainer" containerID="abe50be9dd8d0a00f62d8acce1aa9168554c2e21ef19aad43a6c354e448eca4b" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.208401 5014 scope.go:117] "RemoveContainer" containerID="7bff38f0f38a342547c3fe2deb6daa0f4d60f0e6fc928375e15778085441c09a" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.231492 5014 scope.go:117] "RemoveContainer" containerID="9a66f6c710634dd17a659e4bc3df6445db3864596381cd9c2ca97e620b9a0c1b" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.249981 5014 scope.go:117] "RemoveContainer" containerID="f4b2ac3215592674bbfc6d78eb54803963fa13c601a4479b4ed488c17d8d805f" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.279218 5014 scope.go:117] "RemoveContainer" containerID="4737acc92b37695fddb60d25976e76f21f0e21349485234480ba45da5a95e862" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.302923 5014 scope.go:117] "RemoveContainer" containerID="31caa4e1064501a178e9c926ae479f09dd1bff42e68ba2f6f6865d0fae3ba4a0" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.329143 5014 scope.go:117] "RemoveContainer" containerID="5a0cf3d49f206f3a3137a5f6e5974e246d7030caa84f0ff30e86c711780d1a2b" Dec 05 11:18:04 crc kubenswrapper[5014]: I1205 11:18:04.350251 5014 scope.go:117] "RemoveContainer" containerID="de8e96d8b6dc3949c7215642d69644721be24ab154dba0c113e3e41dc051ebfd" Dec 05 11:18:17 crc kubenswrapper[5014]: I1205 11:18:17.319040 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:18:17 crc kubenswrapper[5014]: E1205 11:18:17.320467 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:18:28 crc kubenswrapper[5014]: I1205 11:18:28.318060 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:18:28 crc kubenswrapper[5014]: E1205 11:18:28.318877 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:18:39 crc kubenswrapper[5014]: I1205 11:18:39.318502 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:18:39 crc kubenswrapper[5014]: E1205 11:18:39.319396 5014 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:18:44 crc kubenswrapper[5014]: I1205 11:18:44.049435 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-cw4lt"] Dec 05 11:18:44 crc kubenswrapper[5014]: I1205 11:18:44.057476 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-cw4lt"] Dec 05 11:18:44 crc kubenswrapper[5014]: I1205 11:18:44.069176 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-5zmlx"] Dec 05 11:18:44 crc kubenswrapper[5014]: I1205 11:18:44.078083 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-5zmlx"] Dec 05 11:18:45 crc kubenswrapper[5014]: I1205 11:18:45.337851 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ff8aa8f-72f5-4431-8bc4-758dd55acdcf" path="/var/lib/kubelet/pods/3ff8aa8f-72f5-4431-8bc4-758dd55acdcf/volumes" Dec 05 11:18:45 crc kubenswrapper[5014]: I1205 11:18:45.339798 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce61806f-b767-42f7-bef7-e11d70d55086" path="/var/lib/kubelet/pods/ce61806f-b767-42f7-bef7-e11d70d55086/volumes" Dec 05 11:18:46 crc kubenswrapper[5014]: I1205 11:18:46.033446 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-2nfpj"] Dec 05 11:18:46 crc kubenswrapper[5014]: I1205 11:18:46.041506 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-2nfpj"] Dec 05 11:18:47 crc kubenswrapper[5014]: I1205 11:18:47.341426 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a1f3060-95d4-4b6e-a029-505738f01238" path="/var/lib/kubelet/pods/7a1f3060-95d4-4b6e-a029-505738f01238/volumes" Dec 05 11:18:49 crc kubenswrapper[5014]: I1205 11:18:49.043548 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-fnq4z"] Dec 05 11:18:49 crc kubenswrapper[5014]: I1205 11:18:49.050983 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-fnq4z"] Dec 05 11:18:49 crc kubenswrapper[5014]: I1205 11:18:49.332256 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb060c27-a3ff-4233-9c8f-a5614f4ef60b" path="/var/lib/kubelet/pods/eb060c27-a3ff-4233-9c8f-a5614f4ef60b/volumes" Dec 05 11:18:50 crc kubenswrapper[5014]: I1205 11:18:50.318923 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:18:50 crc kubenswrapper[5014]: E1205 11:18:50.319650 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:19:02 crc kubenswrapper[5014]: I1205 11:19:02.032628 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-zshwt"] Dec 05 11:19:02 crc 
kubenswrapper[5014]: I1205 11:19:02.042678 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-zshwt"] Dec 05 11:19:02 crc kubenswrapper[5014]: I1205 11:19:02.318838 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:19:02 crc kubenswrapper[5014]: E1205 11:19:02.319110 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:19:03 crc kubenswrapper[5014]: I1205 11:19:03.330938 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36756ede-ab38-444f-8f4a-a07da8173882" path="/var/lib/kubelet/pods/36756ede-ab38-444f-8f4a-a07da8173882/volumes" Dec 05 11:19:04 crc kubenswrapper[5014]: I1205 11:19:04.589658 5014 scope.go:117] "RemoveContainer" containerID="374859f6acd833eec3d01eb214ae18360f03c0bb616482cedb65d51ec5502169" Dec 05 11:19:04 crc kubenswrapper[5014]: I1205 11:19:04.656514 5014 scope.go:117] "RemoveContainer" containerID="e974b527a018cf4f07c7fc3f7d0aaff67a71464cf335065fb89189be64c92ab9" Dec 05 11:19:04 crc kubenswrapper[5014]: I1205 11:19:04.705007 5014 scope.go:117] "RemoveContainer" containerID="3d5c72537e2031b13edb5f744d523cbd55b66e44375dae4086a93e06984d5c13" Dec 05 11:19:04 crc kubenswrapper[5014]: I1205 11:19:04.761454 5014 scope.go:117] "RemoveContainer" containerID="69d1d8d0afd5c43eff1ba839dd2673eaf592cf90fb23209f5b59be261e2502c9" Dec 05 11:19:04 crc kubenswrapper[5014]: I1205 11:19:04.812361 5014 scope.go:117] "RemoveContainer" containerID="f8f46c752ae3707151ef0059d630531aead2e7fdc5de832aeb5656ddaf24ebe4" Dec 05 11:19:14 crc kubenswrapper[5014]: I1205 11:19:14.318431 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:19:14 crc kubenswrapper[5014]: E1205 11:19:14.319218 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:19:28 crc kubenswrapper[5014]: I1205 11:19:28.318222 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:19:28 crc kubenswrapper[5014]: E1205 11:19:28.319457 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:19:43 crc kubenswrapper[5014]: I1205 11:19:43.325194 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889" Dec 05 11:19:43 crc kubenswrapper[5014]: E1205 11:19:43.326091 5014 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681"
Dec 05 11:19:47 crc kubenswrapper[5014]: I1205 11:19:47.066944 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-bq9r8"]
Dec 05 11:19:47 crc kubenswrapper[5014]: I1205 11:19:47.076591 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-bq9r8"]
Dec 05 11:19:47 crc kubenswrapper[5014]: I1205 11:19:47.341493 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca2c99e9-b93d-4c6b-95b5-f0457af5d14e" path="/var/lib/kubelet/pods/ca2c99e9-b93d-4c6b-95b5-f0457af5d14e/volumes"
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.033741 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-6l8fm"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.048475 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-b24t5"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.058803 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-8549-account-create-update-gs47k"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.068523 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-6l8fm"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.076108 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-b24t5"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.085812 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-8549-account-create-update-gs47k"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.096318 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-f499-account-create-update-jx4jj"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.103975 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-f499-account-create-update-jx4jj"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.111041 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-8768-account-create-update-8svtq"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.117721 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-8768-account-create-update-8svtq"]
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.336034 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35aeff68-6195-4a18-bcc6-d744b43632bf" path="/var/lib/kubelet/pods/35aeff68-6195-4a18-bcc6-d744b43632bf/volumes"
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.337425 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48fc994f-42ab-43fa-9f54-8b36b4c1379d" path="/var/lib/kubelet/pods/48fc994f-42ab-43fa-9f54-8b36b4c1379d/volumes"
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.338678 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75fbb5c7-8300-4ee3-9c24-6c05220babbb" path="/var/lib/kubelet/pods/75fbb5c7-8300-4ee3-9c24-6c05220babbb/volumes"
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.339917 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8948faa6-4a90-4c87-b1ce-43fadf3f4548" path="/var/lib/kubelet/pods/8948faa6-4a90-4c87-b1ce-43fadf3f4548/volumes"
Dec 05 11:19:49 crc kubenswrapper[5014]: I1205 11:19:49.342593 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af106b04-f0eb-4a65-bfb9-fe618fab8363" path="/var/lib/kubelet/pods/af106b04-f0eb-4a65-bfb9-fe618fab8363/volumes"
Dec 05 11:19:54 crc kubenswrapper[5014]: I1205 11:19:54.319782 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"
Dec 05 11:19:54 crc kubenswrapper[5014]: E1205 11:19:54.320208 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681"
Dec 05 11:20:04 crc kubenswrapper[5014]: I1205 11:20:04.943873 5014 scope.go:117] "RemoveContainer" containerID="040b2418a90c7f480a0d95309d8a09a53ef2edc99d91873141b8f6a942a9747a"
Dec 05 11:20:04 crc kubenswrapper[5014]: I1205 11:20:04.979234 5014 scope.go:117] "RemoveContainer" containerID="57210b7bc39877dd3f757f1288fd0bb1008f4dc568162e18f3a3f2acab49fd43"
Dec 05 11:20:05 crc kubenswrapper[5014]: I1205 11:20:05.018546 5014 scope.go:117] "RemoveContainer" containerID="05ef908d99f937c96f32bfb4f1d831ad80435aec58da00ed951321d75d71102e"
Dec 05 11:20:05 crc kubenswrapper[5014]: I1205 11:20:05.066000 5014 scope.go:117] "RemoveContainer" containerID="7561be037724c144ef110d0196efc381fbf90df2e2dab2df37d625b23f71b9cf"
Dec 05 11:20:05 crc kubenswrapper[5014]: I1205 11:20:05.113860 5014 scope.go:117] "RemoveContainer" containerID="3f23bcbab17fb896e9af8a057d99c75ba344b32b7ba6b4301c39a010c0a811cd"
Dec 05 11:20:05 crc kubenswrapper[5014]: I1205 11:20:05.160168 5014 scope.go:117] "RemoveContainer" containerID="ba4b3ed9a33f89af8eea5534aba5be3d466e6373c202cef8020761e3b1b23abc"
Dec 05 11:20:06 crc kubenswrapper[5014]: I1205 11:20:06.318828 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"
Dec 05 11:20:06 crc kubenswrapper[5014]: E1205 11:20:06.319315 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681"
Dec 05 11:20:14 crc kubenswrapper[5014]: I1205 11:20:14.266329 5014 generic.go:334] "Generic (PLEG): container finished" podID="82e883e4-b7b9-463c-99e5-ac0a855a22cd" containerID="d920aeab89d156797cae64835c3e11debd48288110579eb98838b11d5f57a378" exitCode=0
Dec 05 11:20:14 crc kubenswrapper[5014]: I1205 11:20:14.266430 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" event={"ID":"82e883e4-b7b9-463c-99e5-ac0a855a22cd","Type":"ContainerDied","Data":"d920aeab89d156797cae64835c3e11debd48288110579eb98838b11d5f57a378"}
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.708000 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6"
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.759770 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-ssh-key\") pod \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") "
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.759950 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-inventory\") pod \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") "
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.760236 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtv64\" (UniqueName: \"kubernetes.io/projected/82e883e4-b7b9-463c-99e5-ac0a855a22cd-kube-api-access-rtv64\") pod \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\" (UID: \"82e883e4-b7b9-463c-99e5-ac0a855a22cd\") "
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.771786 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82e883e4-b7b9-463c-99e5-ac0a855a22cd-kube-api-access-rtv64" (OuterVolumeSpecName: "kube-api-access-rtv64") pod "82e883e4-b7b9-463c-99e5-ac0a855a22cd" (UID: "82e883e4-b7b9-463c-99e5-ac0a855a22cd"). InnerVolumeSpecName "kube-api-access-rtv64". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.786956 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "82e883e4-b7b9-463c-99e5-ac0a855a22cd" (UID: "82e883e4-b7b9-463c-99e5-ac0a855a22cd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.789220 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-inventory" (OuterVolumeSpecName: "inventory") pod "82e883e4-b7b9-463c-99e5-ac0a855a22cd" (UID: "82e883e4-b7b9-463c-99e5-ac0a855a22cd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.862313 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.862352 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82e883e4-b7b9-463c-99e5-ac0a855a22cd-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 11:20:15 crc kubenswrapper[5014]: I1205 11:20:15.862367 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtv64\" (UniqueName: \"kubernetes.io/projected/82e883e4-b7b9-463c-99e5-ac0a855a22cd-kube-api-access-rtv64\") on node \"crc\" DevicePath \"\""
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.283844 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6" event={"ID":"82e883e4-b7b9-463c-99e5-ac0a855a22cd","Type":"ContainerDied","Data":"afbc42bf58d7148739f666748a2bf3322d016cc25c663e325a596adaf31f2a78"}
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.284177 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afbc42bf58d7148739f666748a2bf3322d016cc25c663e325a596adaf31f2a78"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.283895 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.377727 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"]
Dec 05 11:20:16 crc kubenswrapper[5014]: E1205 11:20:16.378213 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82e883e4-b7b9-463c-99e5-ac0a855a22cd" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.378241 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="82e883e4-b7b9-463c-99e5-ac0a855a22cd" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.378489 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="82e883e4-b7b9-463c-99e5-ac0a855a22cd" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.379249 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.381770 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.382018 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.382051 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.382188 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.392196 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"]
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.472196 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p7rp\" (UniqueName: \"kubernetes.io/projected/209c8894-646c-40b1-a33f-3890d10b3e28-kube-api-access-5p7rp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m9t55\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.472332 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m9t55\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.472380 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m9t55\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.573833 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p7rp\" (UniqueName: \"kubernetes.io/projected/209c8894-646c-40b1-a33f-3890d10b3e28-kube-api-access-5p7rp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m9t55\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.573901 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m9t55\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.573953 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m9t55\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.578321 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m9t55\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.578393 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m9t55\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.593445 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p7rp\" (UniqueName: \"kubernetes.io/projected/209c8894-646c-40b1-a33f-3890d10b3e28-kube-api-access-5p7rp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-m9t55\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:16 crc kubenswrapper[5014]: I1205 11:20:16.709978 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:20:17 crc kubenswrapper[5014]: I1205 11:20:17.266799 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"]
Dec 05 11:20:17 crc kubenswrapper[5014]: I1205 11:20:17.295706 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55" event={"ID":"209c8894-646c-40b1-a33f-3890d10b3e28","Type":"ContainerStarted","Data":"baaa91e07a5dbf470d13919127ba2fcc2ca9dddbd41cf10c32a8ec919041e9b5"}
Dec 05 11:20:18 crc kubenswrapper[5014]: I1205 11:20:18.062594 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xjq65"]
Dec 05 11:20:18 crc kubenswrapper[5014]: I1205 11:20:18.078212 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xjq65"]
Dec 05 11:20:18 crc kubenswrapper[5014]: I1205 11:20:18.313125 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55" event={"ID":"209c8894-646c-40b1-a33f-3890d10b3e28","Type":"ContainerStarted","Data":"5db95f602afe7f3e5476b924e1985af66d25b67c2d07cbaaf335fe9a2ff25987"}
Dec 05 11:20:18 crc kubenswrapper[5014]: I1205 11:20:18.331834 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55" podStartSLOduration=1.861327282 podStartE2EDuration="2.331814695s" podCreationTimestamp="2025-12-05 11:20:16 +0000 UTC" firstStartedPulling="2025-12-05 11:20:17.274806708 +0000 UTC m=+1944.222924452" lastFinishedPulling="2025-12-05 11:20:17.745294161 +0000 UTC m=+1944.693411865" observedRunningTime="2025-12-05 11:20:18.327831468 +0000 UTC m=+1945.275949182" watchObservedRunningTime="2025-12-05 11:20:18.331814695 +0000 UTC m=+1945.279932399"
Dec 05 11:20:19 crc kubenswrapper[5014]: I1205 11:20:19.332585 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5301290-e01e-40a5-ba62-bec11488a2e6" path="/var/lib/kubelet/pods/d5301290-e01e-40a5-ba62-bec11488a2e6/volumes"
Dec 05 11:20:20 crc kubenswrapper[5014]: I1205 11:20:20.318863 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"
Dec 05 11:20:20 crc kubenswrapper[5014]: E1205 11:20:20.319368 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681"
Dec 05 11:20:35 crc kubenswrapper[5014]: I1205 11:20:35.319033 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"
Dec 05 11:20:35 crc kubenswrapper[5014]: E1205 11:20:35.319876 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681"
Dec 05 11:20:39 crc kubenswrapper[5014]: I1205 11:20:39.039885 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-xkr6j"]
Dec 05 11:20:39 crc kubenswrapper[5014]: I1205 11:20:39.051735 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-xkr6j"]
Dec 05 11:20:39 crc kubenswrapper[5014]: I1205 11:20:39.331035 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf" path="/var/lib/kubelet/pods/cee1a18a-3dbd-40a0-a81e-7c5ebfa7bdaf/volumes"
Dec 05 11:20:40 crc kubenswrapper[5014]: I1205 11:20:40.027808 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bb4zk"]
Dec 05 11:20:40 crc kubenswrapper[5014]: I1205 11:20:40.036470 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bb4zk"]
Dec 05 11:20:41 crc kubenswrapper[5014]: I1205 11:20:41.328043 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="167e48bc-c3dd-464f-b4ed-9109ffc6de7d" path="/var/lib/kubelet/pods/167e48bc-c3dd-464f-b4ed-9109ffc6de7d/volumes"
Dec 05 11:20:50 crc kubenswrapper[5014]: I1205 11:20:50.318605 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"
Dec 05 11:20:50 crc kubenswrapper[5014]: E1205 11:20:50.319469 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681"
Dec 05 11:21:05 crc kubenswrapper[5014]: I1205 11:21:05.280261 5014 scope.go:117] "RemoveContainer" containerID="ef2d66cbce0da6e6eb448a6ee733ee3758c0635fa383059619b9caee5715929c"
Dec 05 11:21:05 crc kubenswrapper[5014]: I1205 11:21:05.319576 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"
Dec 05 11:21:05 crc kubenswrapper[5014]: I1205 11:21:05.336089 5014 scope.go:117] "RemoveContainer" containerID="47ef7cc458f3ef485675039a98477765e3ae2d9525caa0186c26df7be102e80b"
Dec 05 11:21:05 crc kubenswrapper[5014]: I1205 11:21:05.388828 5014 scope.go:117] "RemoveContainer" containerID="993c4f1fcff568c62368371c02074c7b709267753f182b64ea886085a3c51790"
Dec 05 11:21:05 crc kubenswrapper[5014]: I1205 11:21:05.751889 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"b1d3a1f29e694656b97d30c4a386bb91494e6280ba532e3e802b9a1c08bf1bf2"}
Dec 05 11:21:24 crc kubenswrapper[5014]: I1205 11:21:24.043260 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-kvrd6"]
Dec 05 11:21:24 crc kubenswrapper[5014]: I1205 11:21:24.052488 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-kvrd6"]
Dec 05 11:21:25 crc kubenswrapper[5014]: I1205 11:21:25.329557 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5" path="/var/lib/kubelet/pods/7ead0c5f-aa2f-4d95-af5f-5e0ee24ecae5/volumes"
Dec 05 11:21:33 crc kubenswrapper[5014]: I1205 11:21:33.002945 5014 generic.go:334] "Generic (PLEG): container finished" podID="209c8894-646c-40b1-a33f-3890d10b3e28" containerID="5db95f602afe7f3e5476b924e1985af66d25b67c2d07cbaaf335fe9a2ff25987" exitCode=0
Dec 05 11:21:33 crc kubenswrapper[5014]: I1205 11:21:33.003033 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55" event={"ID":"209c8894-646c-40b1-a33f-3890d10b3e28","Type":"ContainerDied","Data":"5db95f602afe7f3e5476b924e1985af66d25b67c2d07cbaaf335fe9a2ff25987"}
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.459453 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.616762 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p7rp\" (UniqueName: \"kubernetes.io/projected/209c8894-646c-40b1-a33f-3890d10b3e28-kube-api-access-5p7rp\") pod \"209c8894-646c-40b1-a33f-3890d10b3e28\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") "
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.616878 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-ssh-key\") pod \"209c8894-646c-40b1-a33f-3890d10b3e28\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") "
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.617026 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-inventory\") pod \"209c8894-646c-40b1-a33f-3890d10b3e28\" (UID: \"209c8894-646c-40b1-a33f-3890d10b3e28\") "
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.626737 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/209c8894-646c-40b1-a33f-3890d10b3e28-kube-api-access-5p7rp" (OuterVolumeSpecName: "kube-api-access-5p7rp") pod "209c8894-646c-40b1-a33f-3890d10b3e28" (UID: "209c8894-646c-40b1-a33f-3890d10b3e28"). InnerVolumeSpecName "kube-api-access-5p7rp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.654065 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-inventory" (OuterVolumeSpecName: "inventory") pod "209c8894-646c-40b1-a33f-3890d10b3e28" (UID: "209c8894-646c-40b1-a33f-3890d10b3e28"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.678852 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "209c8894-646c-40b1-a33f-3890d10b3e28" (UID: "209c8894-646c-40b1-a33f-3890d10b3e28"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.720443 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.720506 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p7rp\" (UniqueName: \"kubernetes.io/projected/209c8894-646c-40b1-a33f-3890d10b3e28-kube-api-access-5p7rp\") on node \"crc\" DevicePath \"\""
Dec 05 11:21:34 crc kubenswrapper[5014]: I1205 11:21:34.720524 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/209c8894-646c-40b1-a33f-3890d10b3e28-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.021889 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55" event={"ID":"209c8894-646c-40b1-a33f-3890d10b3e28","Type":"ContainerDied","Data":"baaa91e07a5dbf470d13919127ba2fcc2ca9dddbd41cf10c32a8ec919041e9b5"}
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.021931 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="baaa91e07a5dbf470d13919127ba2fcc2ca9dddbd41cf10c32a8ec919041e9b5"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.021973 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-m9t55"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.107230 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"]
Dec 05 11:21:35 crc kubenswrapper[5014]: E1205 11:21:35.107628 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="209c8894-646c-40b1-a33f-3890d10b3e28" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.107645 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="209c8894-646c-40b1-a33f-3890d10b3e28" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.107814 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="209c8894-646c-40b1-a33f-3890d10b3e28" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.108472 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.112389 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.112993 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.113100 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.113164 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.120843 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"]
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.229234 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jv96g\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.229299 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swz7p\" (UniqueName: \"kubernetes.io/projected/69a4f49e-1b6f-4085-81da-69b0e099b769-kube-api-access-swz7p\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jv96g\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.229379 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jv96g\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.331240 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swz7p\" (UniqueName: \"kubernetes.io/projected/69a4f49e-1b6f-4085-81da-69b0e099b769-kube-api-access-swz7p\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jv96g\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.331291 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jv96g\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.331340 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jv96g\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.335250 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jv96g\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.336265 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jv96g\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.348776 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swz7p\" (UniqueName: \"kubernetes.io/projected/69a4f49e-1b6f-4085-81da-69b0e099b769-kube-api-access-swz7p\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jv96g\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.437849 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:35 crc kubenswrapper[5014]: I1205 11:21:35.952557 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"]
Dec 05 11:21:36 crc kubenswrapper[5014]: I1205 11:21:36.033500 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g" event={"ID":"69a4f49e-1b6f-4085-81da-69b0e099b769","Type":"ContainerStarted","Data":"6aca5fe8a76a4ca8b97dee9d49cf27ac4c9914591ce77d1c069be06d2b9946a8"}
Dec 05 11:21:37 crc kubenswrapper[5014]: I1205 11:21:37.043363 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g" event={"ID":"69a4f49e-1b6f-4085-81da-69b0e099b769","Type":"ContainerStarted","Data":"be1e244db37923d0788850f80875899373ec1b117f235d86db4aca22225c0c4e"}
Dec 05 11:21:37 crc kubenswrapper[5014]: I1205 11:21:37.063389 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g" podStartSLOduration=1.617808726 podStartE2EDuration="2.063366441s" podCreationTimestamp="2025-12-05 11:21:35 +0000 UTC" firstStartedPulling="2025-12-05 11:21:35.962671493 +0000 UTC m=+2022.910789197" lastFinishedPulling="2025-12-05 11:21:36.408229198 +0000 UTC m=+2023.356346912" observedRunningTime="2025-12-05 11:21:37.056885824 +0000 UTC m=+2024.005003528" watchObservedRunningTime="2025-12-05 11:21:37.063366441 +0000 UTC m=+2024.011484145"
Dec 05 11:21:42 crc kubenswrapper[5014]: I1205 11:21:42.087571 5014 generic.go:334] "Generic (PLEG): container finished" podID="69a4f49e-1b6f-4085-81da-69b0e099b769" containerID="be1e244db37923d0788850f80875899373ec1b117f235d86db4aca22225c0c4e" exitCode=0
Dec 05 11:21:42 crc kubenswrapper[5014]: I1205 11:21:42.087661 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g" event={"ID":"69a4f49e-1b6f-4085-81da-69b0e099b769","Type":"ContainerDied","Data":"be1e244db37923d0788850f80875899373ec1b117f235d86db4aca22225c0c4e"}
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.131053 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xjmg6"]
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.133614 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.153500 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xjmg6"]
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.291677 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-catalog-content\") pod \"redhat-operators-xjmg6\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") " pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.291782 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgfk4\" (UniqueName: \"kubernetes.io/projected/ee771c6a-d897-4bee-aa99-2e1df58c3dee-kube-api-access-fgfk4\") pod \"redhat-operators-xjmg6\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") " pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.291852 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-utilities\") pod \"redhat-operators-xjmg6\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") " pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.394407 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-utilities\") pod \"redhat-operators-xjmg6\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") " pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.394600 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-catalog-content\") pod \"redhat-operators-xjmg6\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") " pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.394690 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgfk4\" (UniqueName: \"kubernetes.io/projected/ee771c6a-d897-4bee-aa99-2e1df58c3dee-kube-api-access-fgfk4\") pod \"redhat-operators-xjmg6\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") " pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.395223 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-utilities\") pod \"redhat-operators-xjmg6\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") " pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.395500 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-catalog-content\") pod \"redhat-operators-xjmg6\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") " pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.419117 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgfk4\" (UniqueName: \"kubernetes.io/projected/ee771c6a-d897-4bee-aa99-2e1df58c3dee-kube-api-access-fgfk4\") pod \"redhat-operators-xjmg6\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") " pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.472155 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.606602 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.702853 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swz7p\" (UniqueName: \"kubernetes.io/projected/69a4f49e-1b6f-4085-81da-69b0e099b769-kube-api-access-swz7p\") pod \"69a4f49e-1b6f-4085-81da-69b0e099b769\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") "
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.702984 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-ssh-key\") pod \"69a4f49e-1b6f-4085-81da-69b0e099b769\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") "
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.703028 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-inventory\") pod \"69a4f49e-1b6f-4085-81da-69b0e099b769\" (UID: \"69a4f49e-1b6f-4085-81da-69b0e099b769\") "
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.711502 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69a4f49e-1b6f-4085-81da-69b0e099b769-kube-api-access-swz7p" (OuterVolumeSpecName: "kube-api-access-swz7p") pod "69a4f49e-1b6f-4085-81da-69b0e099b769" (UID: "69a4f49e-1b6f-4085-81da-69b0e099b769"). InnerVolumeSpecName "kube-api-access-swz7p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.748893 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "69a4f49e-1b6f-4085-81da-69b0e099b769" (UID: "69a4f49e-1b6f-4085-81da-69b0e099b769"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.751849 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-inventory" (OuterVolumeSpecName: "inventory") pod "69a4f49e-1b6f-4085-81da-69b0e099b769" (UID: "69a4f49e-1b6f-4085-81da-69b0e099b769"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.771663 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xjmg6"]
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.805876 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swz7p\" (UniqueName: \"kubernetes.io/projected/69a4f49e-1b6f-4085-81da-69b0e099b769-kube-api-access-swz7p\") on node \"crc\" DevicePath \"\""
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.805914 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 11:21:43 crc kubenswrapper[5014]: I1205 11:21:43.805926 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69a4f49e-1b6f-4085-81da-69b0e099b769-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.108230 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g" event={"ID":"69a4f49e-1b6f-4085-81da-69b0e099b769","Type":"ContainerDied","Data":"6aca5fe8a76a4ca8b97dee9d49cf27ac4c9914591ce77d1c069be06d2b9946a8"}
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.108738 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6aca5fe8a76a4ca8b97dee9d49cf27ac4c9914591ce77d1c069be06d2b9946a8"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.108870 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jv96g"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.117085 5014 generic.go:334] "Generic (PLEG): container finished" podID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerID="8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7" exitCode=0
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.117146 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xjmg6" event={"ID":"ee771c6a-d897-4bee-aa99-2e1df58c3dee","Type":"ContainerDied","Data":"8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7"}
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.117180 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xjmg6" event={"ID":"ee771c6a-d897-4bee-aa99-2e1df58c3dee","Type":"ContainerStarted","Data":"30a374ee9672aab9fce14ac7e39474dbae713668e59fd40998e7b16e0fbf14fa"}
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.189382 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"]
Dec 05 11:21:44 crc kubenswrapper[5014]: E1205 11:21:44.189827 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69a4f49e-1b6f-4085-81da-69b0e099b769" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.189845 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="69a4f49e-1b6f-4085-81da-69b0e099b769" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.190102 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="69a4f49e-1b6f-4085-81da-69b0e099b769" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.190838 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.193468 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.193753 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.193939 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.194050 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.201681 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"]
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.321643 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jns8m\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.321853 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkkjj\" (UniqueName: \"kubernetes.io/projected/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-kube-api-access-kkkjj\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jns8m\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.321907 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jns8m\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.423549 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkkjj\" (UniqueName: \"kubernetes.io/projected/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-kube-api-access-kkkjj\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jns8m\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.423598 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jns8m\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.423701 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jns8m\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.429924 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jns8m\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.429997 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jns8m\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.446987 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkkjj\" (UniqueName: \"kubernetes.io/projected/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-kube-api-access-kkkjj\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-jns8m\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:44 crc kubenswrapper[5014]: I1205 11:21:44.521636 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"
Dec 05 11:21:45 crc kubenswrapper[5014]: I1205 11:21:45.009125 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m"]
Dec 05 11:21:45 crc kubenswrapper[5014]: W1205 11:21:45.019589 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6df89661_8d7a_4ea6_b3ca_4560ecc324f3.slice/crio-c611bac10f9420e8fd8e6346427da2bbd50b9f78532f17f971f12c2b5269d363 WatchSource:0}: Error finding container c611bac10f9420e8fd8e6346427da2bbd50b9f78532f17f971f12c2b5269d363: Status 404 returned error can't find the container with id c611bac10f9420e8fd8e6346427da2bbd50b9f78532f17f971f12c2b5269d363
Dec 05 11:21:45 crc kubenswrapper[5014]: I1205 11:21:45.130017 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m" event={"ID":"6df89661-8d7a-4ea6-b3ca-4560ecc324f3","Type":"ContainerStarted","Data":"c611bac10f9420e8fd8e6346427da2bbd50b9f78532f17f971f12c2b5269d363"}
Dec 05 11:21:47 crc kubenswrapper[5014]: I1205 11:21:47.152642 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m" event={"ID":"6df89661-8d7a-4ea6-b3ca-4560ecc324f3","Type":"ContainerStarted","Data":"904137f3c375e47475965cebada912acf3794a6951b5384c8f1a0422776abe3b"}
Dec 05 11:21:47 crc kubenswrapper[5014]: I1205 11:21:47.157249 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xjmg6" event={"ID":"ee771c6a-d897-4bee-aa99-2e1df58c3dee","Type":"ContainerStarted","Data":"fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2"}
Dec 05 11:21:47 crc kubenswrapper[5014]: I1205 11:21:47.179963 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m" podStartSLOduration=1.9065998579999999 podStartE2EDuration="3.179945257s" podCreationTimestamp="2025-12-05 11:21:44 +0000 UTC" firstStartedPulling="2025-12-05 11:21:45.022030915 +0000 UTC m=+2031.970148619" lastFinishedPulling="2025-12-05 11:21:46.295376304 +0000 UTC m=+2033.243494018" observedRunningTime="2025-12-05 11:21:47.172133827 +0000 UTC m=+2034.120251541" watchObservedRunningTime="2025-12-05 11:21:47.179945257 +0000 UTC m=+2034.128062961"
Dec 05 11:21:49 crc kubenswrapper[5014]: I1205 11:21:49.194077 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xjmg6" event={"ID":"ee771c6a-d897-4bee-aa99-2e1df58c3dee","Type":"ContainerDied","Data":"fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2"}
Dec 05 11:21:49 crc kubenswrapper[5014]: I1205 11:21:49.194084 5014 generic.go:334] "Generic (PLEG): container finished" podID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerID="fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2" exitCode=0
Dec 05 11:21:52 crc kubenswrapper[5014]: I1205 11:21:52.226232 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xjmg6" event={"ID":"ee771c6a-d897-4bee-aa99-2e1df58c3dee","Type":"ContainerStarted","Data":"8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba"}
Dec 05 11:21:52 crc kubenswrapper[5014]: I1205 11:21:52.254949 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xjmg6" podStartSLOduration=1.825133758 podStartE2EDuration="9.25493144s" podCreationTimestamp="2025-12-05 11:21:43 +0000 UTC" firstStartedPulling="2025-12-05 11:21:44.120793538 +0000 UTC m=+2031.068911242" lastFinishedPulling="2025-12-05 11:21:51.55059122 +0000 UTC m=+2038.498708924" observedRunningTime="2025-12-05 11:21:52.243383719 +0000 UTC m=+2039.191501443" watchObservedRunningTime="2025-12-05 11:21:52.25493144 +0000 UTC m=+2039.203049154"
Dec 05 11:21:53 crc kubenswrapper[5014]: I1205 11:21:53.472976 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:53 crc kubenswrapper[5014]: I1205 11:21:53.473637 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:21:54 crc kubenswrapper[5014]: I1205 11:21:54.519222 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xjmg6" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerName="registry-server" probeResult="failure" output=<
Dec 05 11:21:54 crc kubenswrapper[5014]: timeout: failed to connect service ":50051" within 1s
Dec 05 11:21:54 crc kubenswrapper[5014]: >
Dec 05 11:22:03 crc kubenswrapper[5014]: I1205 11:22:03.517821 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:22:03 crc kubenswrapper[5014]: I1205 11:22:03.570403 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:22:03 crc kubenswrapper[5014]: I1205 11:22:03.761105 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xjmg6"]
Dec 05 11:22:05 crc kubenswrapper[5014]: I1205 11:22:05.347891 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xjmg6" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerName="registry-server" containerID="cri-o://8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba" gracePeriod=2
Dec 05 11:22:05 crc kubenswrapper[5014]: I1205 11:22:05.589424 5014 scope.go:117] "RemoveContainer" containerID="367313f982c9132ff7236df5ff0db9fc657055bf09cfd30ee498f4926c7248b9"
Dec 05 11:22:05 crc kubenswrapper[5014]: I1205 11:22:05.856716 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.016147 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgfk4\" (UniqueName: \"kubernetes.io/projected/ee771c6a-d897-4bee-aa99-2e1df58c3dee-kube-api-access-fgfk4\") pod \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") "
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.016482 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-utilities\") pod \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") "
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.016590 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-catalog-content\") pod \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\" (UID: \"ee771c6a-d897-4bee-aa99-2e1df58c3dee\") "
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.017641 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-utilities" (OuterVolumeSpecName: "utilities") pod "ee771c6a-d897-4bee-aa99-2e1df58c3dee" (UID: "ee771c6a-d897-4bee-aa99-2e1df58c3dee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.023519 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee771c6a-d897-4bee-aa99-2e1df58c3dee-kube-api-access-fgfk4" (OuterVolumeSpecName: "kube-api-access-fgfk4") pod "ee771c6a-d897-4bee-aa99-2e1df58c3dee" (UID: "ee771c6a-d897-4bee-aa99-2e1df58c3dee"). InnerVolumeSpecName "kube-api-access-fgfk4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.119560 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgfk4\" (UniqueName: \"kubernetes.io/projected/ee771c6a-d897-4bee-aa99-2e1df58c3dee-kube-api-access-fgfk4\") on node \"crc\" DevicePath \"\""
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.119602 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.129464 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee771c6a-d897-4bee-aa99-2e1df58c3dee" (UID: "ee771c6a-d897-4bee-aa99-2e1df58c3dee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.220988 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee771c6a-d897-4bee-aa99-2e1df58c3dee-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.358888 5014 generic.go:334] "Generic (PLEG): container finished" podID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerID="8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba" exitCode=0
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.359196 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xjmg6" event={"ID":"ee771c6a-d897-4bee-aa99-2e1df58c3dee","Type":"ContainerDied","Data":"8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba"}
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.359223 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xjmg6" event={"ID":"ee771c6a-d897-4bee-aa99-2e1df58c3dee","Type":"ContainerDied","Data":"30a374ee9672aab9fce14ac7e39474dbae713668e59fd40998e7b16e0fbf14fa"}
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.359242 5014 scope.go:117] "RemoveContainer" containerID="8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.359377 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xjmg6"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.392930 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xjmg6"]
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.395318 5014 scope.go:117] "RemoveContainer" containerID="fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.402491 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xjmg6"]
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.422050 5014 scope.go:117] "RemoveContainer" containerID="8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.442130 5014 scope.go:117] "RemoveContainer" containerID="8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba"
Dec 05 11:22:06 crc kubenswrapper[5014]: E1205 11:22:06.442736 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba\": container with ID starting with 8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba not found: ID does not exist" containerID="8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.442773 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba"} err="failed to get container status \"8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba\": rpc error: code = NotFound desc = could not find container \"8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba\": container with ID starting with 8e50ae71db41f674d5d3ecd51b2b4e2f9c5f15c23eef04df16502e5cacf6f1ba not found: ID does not exist"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.442810 5014 scope.go:117] "RemoveContainer" containerID="fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2"
Dec 05 11:22:06 crc kubenswrapper[5014]: E1205 11:22:06.443126 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2\": container with ID starting with fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2 not found: ID does not exist" containerID="fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.443175 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2"} err="failed to get container status \"fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2\": rpc error: code = NotFound desc = could not find container \"fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2\": container with ID starting with fc46721eb9ca10d27acb2eee4435214cabd0b160b8c9302745ce0e738f1c67e2 not found: ID does not exist"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.443195 5014 scope.go:117] "RemoveContainer" containerID="8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7"
Dec 05 11:22:06 crc kubenswrapper[5014]: E1205 11:22:06.443461 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7\": container with ID starting with 8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7 not found: ID does not exist" containerID="8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7"
Dec 05 11:22:06 crc kubenswrapper[5014]: I1205 11:22:06.443490 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7"} err="failed to get container status \"8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7\": rpc error: code = NotFound desc = could not find container \"8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7\": container with ID starting with 8d1a52d4dbd494bf1037d347749412f20d86802e43046da9661a01db28078ea7 not found: ID does not exist"
Dec 05 11:22:07 crc kubenswrapper[5014]: I1205 11:22:07.339182 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" path="/var/lib/kubelet/pods/ee771c6a-d897-4bee-aa99-2e1df58c3dee/volumes"
Dec 05 11:22:24 crc kubenswrapper[5014]: I1205 11:22:24.526748 5014 generic.go:334] "Generic (PLEG): container finished" podID="6df89661-8d7a-4ea6-b3ca-4560ecc324f3" containerID="904137f3c375e47475965cebada912acf3794a6951b5384c8f1a0422776abe3b" exitCode=0
Dec 05 11:22:24 crc kubenswrapper[5014]: I1205 11:22:24.526846 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m" event={"ID":"6df89661-8d7a-4ea6-b3ca-4560ecc324f3","Type":"ContainerDied","Data":"904137f3c375e47475965cebada912acf3794a6951b5384c8f1a0422776abe3b"}
Dec 05 11:22:25 crc kubenswrapper[5014]: I1205 11:22:25.949036 5014 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.116909 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-ssh-key\") pod \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.117090 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkkjj\" (UniqueName: \"kubernetes.io/projected/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-kube-api-access-kkkjj\") pod \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.117161 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-inventory\") pod \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\" (UID: \"6df89661-8d7a-4ea6-b3ca-4560ecc324f3\") " Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.122266 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-kube-api-access-kkkjj" (OuterVolumeSpecName: "kube-api-access-kkkjj") pod "6df89661-8d7a-4ea6-b3ca-4560ecc324f3" (UID: "6df89661-8d7a-4ea6-b3ca-4560ecc324f3"). InnerVolumeSpecName "kube-api-access-kkkjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.144926 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-inventory" (OuterVolumeSpecName: "inventory") pod "6df89661-8d7a-4ea6-b3ca-4560ecc324f3" (UID: "6df89661-8d7a-4ea6-b3ca-4560ecc324f3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.146644 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6df89661-8d7a-4ea6-b3ca-4560ecc324f3" (UID: "6df89661-8d7a-4ea6-b3ca-4560ecc324f3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.220567 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkkjj\" (UniqueName: \"kubernetes.io/projected/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-kube-api-access-kkkjj\") on node \"crc\" DevicePath \"\"" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.220610 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.220623 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6df89661-8d7a-4ea6-b3ca-4560ecc324f3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.545471 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m" event={"ID":"6df89661-8d7a-4ea6-b3ca-4560ecc324f3","Type":"ContainerDied","Data":"c611bac10f9420e8fd8e6346427da2bbd50b9f78532f17f971f12c2b5269d363"} Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.545514 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c611bac10f9420e8fd8e6346427da2bbd50b9f78532f17f971f12c2b5269d363" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.545523 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-jns8m" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.661542 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5"] Dec 05 11:22:26 crc kubenswrapper[5014]: E1205 11:22:26.661958 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerName="extract-utilities" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.661973 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerName="extract-utilities" Dec 05 11:22:26 crc kubenswrapper[5014]: E1205 11:22:26.661986 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerName="registry-server" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.661992 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerName="registry-server" Dec 05 11:22:26 crc kubenswrapper[5014]: E1205 11:22:26.662009 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerName="extract-content" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.662015 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerName="extract-content" Dec 05 11:22:26 crc kubenswrapper[5014]: E1205 11:22:26.662029 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6df89661-8d7a-4ea6-b3ca-4560ecc324f3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.662036 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="6df89661-8d7a-4ea6-b3ca-4560ecc324f3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.662234 5014 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="6df89661-8d7a-4ea6-b3ca-4560ecc324f3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.662248 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee771c6a-d897-4bee-aa99-2e1df58c3dee" containerName="registry-server" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.662922 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.666800 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.667240 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.667679 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.667877 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.670667 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5"] Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.831336 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.831718 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.831815 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb5xq\" (UniqueName: \"kubernetes.io/projected/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-kube-api-access-mb5xq\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.934135 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.934289 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb5xq\" (UniqueName: \"kubernetes.io/projected/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-kube-api-access-mb5xq\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.934411 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.938066 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.938720 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.950826 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb5xq\" (UniqueName: \"kubernetes.io/projected/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-kube-api-access-mb5xq\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:26 crc kubenswrapper[5014]: I1205 11:22:26.985159 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:22:27 crc kubenswrapper[5014]: I1205 11:22:27.543399 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5"] Dec 05 11:22:27 crc kubenswrapper[5014]: I1205 11:22:27.560255 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" event={"ID":"aa2bf2b4-c7fa-40e6-adee-d043c47760bc","Type":"ContainerStarted","Data":"94c88e1b303ac538c31a701c128c08ffc26ebed68c8d9c33e5bf199112013a0b"} Dec 05 11:22:28 crc kubenswrapper[5014]: I1205 11:22:28.568996 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" event={"ID":"aa2bf2b4-c7fa-40e6-adee-d043c47760bc","Type":"ContainerStarted","Data":"2c2070998cd4ed3edff39fc0d58c81d26494273882b7cb1c0c566b9997d6d5e8"} Dec 05 11:22:28 crc kubenswrapper[5014]: I1205 11:22:28.589090 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" podStartSLOduration=2.173882826 podStartE2EDuration="2.58893798s" podCreationTimestamp="2025-12-05 11:22:26 +0000 UTC" firstStartedPulling="2025-12-05 11:22:27.551942261 +0000 UTC m=+2074.500059965" lastFinishedPulling="2025-12-05 11:22:27.966997415 +0000 UTC m=+2074.915115119" observedRunningTime="2025-12-05 11:22:28.581494859 +0000 UTC m=+2075.529612563" watchObservedRunningTime="2025-12-05 11:22:28.58893798 +0000 UTC m=+2075.537055684" Dec 05 11:23:18 crc kubenswrapper[5014]: I1205 11:23:18.045141 5014 generic.go:334] "Generic (PLEG): container finished" podID="aa2bf2b4-c7fa-40e6-adee-d043c47760bc" containerID="2c2070998cd4ed3edff39fc0d58c81d26494273882b7cb1c0c566b9997d6d5e8" exitCode=0 Dec 05 11:23:18 crc kubenswrapper[5014]: I1205 11:23:18.045309 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" event={"ID":"aa2bf2b4-c7fa-40e6-adee-d043c47760bc","Type":"ContainerDied","Data":"2c2070998cd4ed3edff39fc0d58c81d26494273882b7cb1c0c566b9997d6d5e8"} Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.589516 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.673406 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-inventory\") pod \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.673545 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb5xq\" (UniqueName: \"kubernetes.io/projected/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-kube-api-access-mb5xq\") pod \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.673638 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-ssh-key\") pod \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\" (UID: \"aa2bf2b4-c7fa-40e6-adee-d043c47760bc\") " Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.678735 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-kube-api-access-mb5xq" (OuterVolumeSpecName: "kube-api-access-mb5xq") pod "aa2bf2b4-c7fa-40e6-adee-d043c47760bc" (UID: "aa2bf2b4-c7fa-40e6-adee-d043c47760bc"). InnerVolumeSpecName "kube-api-access-mb5xq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.712771 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aa2bf2b4-c7fa-40e6-adee-d043c47760bc" (UID: "aa2bf2b4-c7fa-40e6-adee-d043c47760bc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.714344 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-inventory" (OuterVolumeSpecName: "inventory") pod "aa2bf2b4-c7fa-40e6-adee-d043c47760bc" (UID: "aa2bf2b4-c7fa-40e6-adee-d043c47760bc"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.776474 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.776517 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb5xq\" (UniqueName: \"kubernetes.io/projected/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-kube-api-access-mb5xq\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:19 crc kubenswrapper[5014]: I1205 11:23:19.776528 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa2bf2b4-c7fa-40e6-adee-d043c47760bc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.063529 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" event={"ID":"aa2bf2b4-c7fa-40e6-adee-d043c47760bc","Type":"ContainerDied","Data":"94c88e1b303ac538c31a701c128c08ffc26ebed68c8d9c33e5bf199112013a0b"} Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.063887 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94c88e1b303ac538c31a701c128c08ffc26ebed68c8d9c33e5bf199112013a0b" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.063660 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.160328 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-p24n8"] Dec 05 11:23:20 crc kubenswrapper[5014]: E1205 11:23:20.160812 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa2bf2b4-c7fa-40e6-adee-d043c47760bc" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.160827 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa2bf2b4-c7fa-40e6-adee-d043c47760bc" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.161056 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa2bf2b4-c7fa-40e6-adee-d043c47760bc" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.161829 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.164677 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.164781 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.164778 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.164902 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.172402 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-p24n8"] Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.181863 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgppd\" (UniqueName: \"kubernetes.io/projected/11921594-1098-41c9-8744-7801330f646c-kube-api-access-sgppd\") pod \"ssh-known-hosts-edpm-deployment-p24n8\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.181928 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-p24n8\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.182263 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-p24n8\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.283616 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgppd\" (UniqueName: \"kubernetes.io/projected/11921594-1098-41c9-8744-7801330f646c-kube-api-access-sgppd\") pod \"ssh-known-hosts-edpm-deployment-p24n8\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.283688 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-p24n8\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.283786 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-p24n8\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc 
kubenswrapper[5014]: I1205 11:23:20.287952 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-p24n8\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.288520 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-p24n8\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.300582 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgppd\" (UniqueName: \"kubernetes.io/projected/11921594-1098-41c9-8744-7801330f646c-kube-api-access-sgppd\") pod \"ssh-known-hosts-edpm-deployment-p24n8\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.479338 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:20 crc kubenswrapper[5014]: I1205 11:23:20.997140 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-p24n8"] Dec 05 11:23:21 crc kubenswrapper[5014]: I1205 11:23:21.006734 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:23:21 crc kubenswrapper[5014]: I1205 11:23:21.071122 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" event={"ID":"11921594-1098-41c9-8744-7801330f646c","Type":"ContainerStarted","Data":"0883e9f3b352a483d6a94d313b5b23d4aec5d472b1bd06f2349ad41d92f7d0a6"} Dec 05 11:23:22 crc kubenswrapper[5014]: I1205 11:23:22.080976 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" event={"ID":"11921594-1098-41c9-8744-7801330f646c","Type":"ContainerStarted","Data":"a838ff531e6201c565620a97ec6a8a52bc29166b7ff80b2049d0ec97ec182103"} Dec 05 11:23:22 crc kubenswrapper[5014]: I1205 11:23:22.102808 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" podStartSLOduration=1.670177553 podStartE2EDuration="2.102790684s" podCreationTimestamp="2025-12-05 11:23:20 +0000 UTC" firstStartedPulling="2025-12-05 11:23:21.006471672 +0000 UTC m=+2127.954589376" lastFinishedPulling="2025-12-05 11:23:21.439084803 +0000 UTC m=+2128.387202507" observedRunningTime="2025-12-05 11:23:22.099303059 +0000 UTC m=+2129.047420763" watchObservedRunningTime="2025-12-05 11:23:22.102790684 +0000 UTC m=+2129.050908388" Dec 05 11:23:29 crc kubenswrapper[5014]: I1205 11:23:29.137984 5014 generic.go:334] "Generic (PLEG): container finished" podID="11921594-1098-41c9-8744-7801330f646c" containerID="a838ff531e6201c565620a97ec6a8a52bc29166b7ff80b2049d0ec97ec182103" exitCode=0 Dec 05 11:23:29 crc kubenswrapper[5014]: I1205 11:23:29.138209 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" 
event={"ID":"11921594-1098-41c9-8744-7801330f646c","Type":"ContainerDied","Data":"a838ff531e6201c565620a97ec6a8a52bc29166b7ff80b2049d0ec97ec182103"} Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.568262 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.604643 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgppd\" (UniqueName: \"kubernetes.io/projected/11921594-1098-41c9-8744-7801330f646c-kube-api-access-sgppd\") pod \"11921594-1098-41c9-8744-7801330f646c\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.604733 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-ssh-key-openstack-edpm-ipam\") pod \"11921594-1098-41c9-8744-7801330f646c\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.604825 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-inventory-0\") pod \"11921594-1098-41c9-8744-7801330f646c\" (UID: \"11921594-1098-41c9-8744-7801330f646c\") " Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.611581 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11921594-1098-41c9-8744-7801330f646c-kube-api-access-sgppd" (OuterVolumeSpecName: "kube-api-access-sgppd") pod "11921594-1098-41c9-8744-7801330f646c" (UID: "11921594-1098-41c9-8744-7801330f646c"). InnerVolumeSpecName "kube-api-access-sgppd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.643242 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "11921594-1098-41c9-8744-7801330f646c" (UID: "11921594-1098-41c9-8744-7801330f646c"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.646346 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "11921594-1098-41c9-8744-7801330f646c" (UID: "11921594-1098-41c9-8744-7801330f646c"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.706579 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgppd\" (UniqueName: \"kubernetes.io/projected/11921594-1098-41c9-8744-7801330f646c-kube-api-access-sgppd\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.706626 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:30 crc kubenswrapper[5014]: I1205 11:23:30.706641 5014 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/11921594-1098-41c9-8744-7801330f646c-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.156671 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" event={"ID":"11921594-1098-41c9-8744-7801330f646c","Type":"ContainerDied","Data":"0883e9f3b352a483d6a94d313b5b23d4aec5d472b1bd06f2349ad41d92f7d0a6"} Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.157056 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0883e9f3b352a483d6a94d313b5b23d4aec5d472b1bd06f2349ad41d92f7d0a6" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.156752 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-p24n8" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.232643 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr"] Dec 05 11:23:31 crc kubenswrapper[5014]: E1205 11:23:31.233083 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11921594-1098-41c9-8744-7801330f646c" containerName="ssh-known-hosts-edpm-deployment" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.233102 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="11921594-1098-41c9-8744-7801330f646c" containerName="ssh-known-hosts-edpm-deployment" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.233262 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="11921594-1098-41c9-8744-7801330f646c" containerName="ssh-known-hosts-edpm-deployment" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.233877 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.235846 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.236098 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.236779 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.237032 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.248987 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr"] Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.316107 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs8sg\" (UniqueName: \"kubernetes.io/projected/560529fa-7baf-4bce-b55b-3816b5c7928c-kube-api-access-bs8sg\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bfwkr\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.316166 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bfwkr\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.316191 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bfwkr\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.418147 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bfwkr\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.418200 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bfwkr\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.418700 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs8sg\" (UniqueName: \"kubernetes.io/projected/560529fa-7baf-4bce-b55b-3816b5c7928c-kube-api-access-bs8sg\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bfwkr\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.422586 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bfwkr\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.424400 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bfwkr\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.442401 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs8sg\" (UniqueName: \"kubernetes.io/projected/560529fa-7baf-4bce-b55b-3816b5c7928c-kube-api-access-bs8sg\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bfwkr\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:31 crc kubenswrapper[5014]: I1205 11:23:31.556551 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:32 crc kubenswrapper[5014]: I1205 11:23:32.129936 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr"] Dec 05 11:23:32 crc kubenswrapper[5014]: I1205 11:23:32.167152 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" event={"ID":"560529fa-7baf-4bce-b55b-3816b5c7928c","Type":"ContainerStarted","Data":"d5d751072d0a22306375e1f60c858d9e0b51e48ff584734be525c26d0f750a66"} Dec 05 11:23:32 crc kubenswrapper[5014]: I1205 11:23:32.936789 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:23:32 crc kubenswrapper[5014]: I1205 11:23:32.937157 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:23:33 crc kubenswrapper[5014]: I1205 11:23:33.176699 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" event={"ID":"560529fa-7baf-4bce-b55b-3816b5c7928c","Type":"ContainerStarted","Data":"15a64df500d1e0a003080c038d3770772eb04c251a3f0f0f2aab58258876d974"} Dec 05 11:23:33 crc kubenswrapper[5014]: I1205 11:23:33.199355 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" podStartSLOduration=1.8062590219999999 podStartE2EDuration="2.199334817s" podCreationTimestamp="2025-12-05 11:23:31 +0000 UTC" firstStartedPulling="2025-12-05 11:23:32.141867724 +0000 UTC m=+2139.089985428" 
lastFinishedPulling="2025-12-05 11:23:32.534943519 +0000 UTC m=+2139.483061223" observedRunningTime="2025-12-05 11:23:33.192723045 +0000 UTC m=+2140.140840769" watchObservedRunningTime="2025-12-05 11:23:33.199334817 +0000 UTC m=+2140.147452531" Dec 05 11:23:41 crc kubenswrapper[5014]: I1205 11:23:41.250848 5014 generic.go:334] "Generic (PLEG): container finished" podID="560529fa-7baf-4bce-b55b-3816b5c7928c" containerID="15a64df500d1e0a003080c038d3770772eb04c251a3f0f0f2aab58258876d974" exitCode=0 Dec 05 11:23:41 crc kubenswrapper[5014]: I1205 11:23:41.250936 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" event={"ID":"560529fa-7baf-4bce-b55b-3816b5c7928c","Type":"ContainerDied","Data":"15a64df500d1e0a003080c038d3770772eb04c251a3f0f0f2aab58258876d974"} Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.780459 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.860497 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-inventory\") pod \"560529fa-7baf-4bce-b55b-3816b5c7928c\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.860574 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bs8sg\" (UniqueName: \"kubernetes.io/projected/560529fa-7baf-4bce-b55b-3816b5c7928c-kube-api-access-bs8sg\") pod \"560529fa-7baf-4bce-b55b-3816b5c7928c\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.860683 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-ssh-key\") pod \"560529fa-7baf-4bce-b55b-3816b5c7928c\" (UID: \"560529fa-7baf-4bce-b55b-3816b5c7928c\") " Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.865996 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/560529fa-7baf-4bce-b55b-3816b5c7928c-kube-api-access-bs8sg" (OuterVolumeSpecName: "kube-api-access-bs8sg") pod "560529fa-7baf-4bce-b55b-3816b5c7928c" (UID: "560529fa-7baf-4bce-b55b-3816b5c7928c"). InnerVolumeSpecName "kube-api-access-bs8sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.888566 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-inventory" (OuterVolumeSpecName: "inventory") pod "560529fa-7baf-4bce-b55b-3816b5c7928c" (UID: "560529fa-7baf-4bce-b55b-3816b5c7928c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.889395 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "560529fa-7baf-4bce-b55b-3816b5c7928c" (UID: "560529fa-7baf-4bce-b55b-3816b5c7928c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.963094 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.963122 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bs8sg\" (UniqueName: \"kubernetes.io/projected/560529fa-7baf-4bce-b55b-3816b5c7928c-kube-api-access-bs8sg\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:42 crc kubenswrapper[5014]: I1205 11:23:42.963133 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/560529fa-7baf-4bce-b55b-3816b5c7928c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.269059 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" event={"ID":"560529fa-7baf-4bce-b55b-3816b5c7928c","Type":"ContainerDied","Data":"d5d751072d0a22306375e1f60c858d9e0b51e48ff584734be525c26d0f750a66"} Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.269104 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5d751072d0a22306375e1f60c858d9e0b51e48ff584734be525c26d0f750a66" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.269147 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bfwkr" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.342454 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns"] Dec 05 11:23:43 crc kubenswrapper[5014]: E1205 11:23:43.342903 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="560529fa-7baf-4bce-b55b-3816b5c7928c" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.342921 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="560529fa-7baf-4bce-b55b-3816b5c7928c" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.343130 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="560529fa-7baf-4bce-b55b-3816b5c7928c" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.343898 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.345592 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.345829 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.345855 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.346462 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.363979 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns"] Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.472739 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.472870 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrdsk\" (UniqueName: \"kubernetes.io/projected/9b908cdd-21cf-4f71-8bc7-83db13979563-kube-api-access-wrdsk\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.472936 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.576341 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.576518 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.577038 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrdsk\" (UniqueName: \"kubernetes.io/projected/9b908cdd-21cf-4f71-8bc7-83db13979563-kube-api-access-wrdsk\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns\" (UID: 
\"9b908cdd-21cf-4f71-8bc7-83db13979563\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.581116 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.581385 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.597729 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrdsk\" (UniqueName: \"kubernetes.io/projected/9b908cdd-21cf-4f71-8bc7-83db13979563-kube-api-access-wrdsk\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:43 crc kubenswrapper[5014]: I1205 11:23:43.666836 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:44 crc kubenswrapper[5014]: I1205 11:23:44.202494 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns"] Dec 05 11:23:44 crc kubenswrapper[5014]: I1205 11:23:44.277581 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" event={"ID":"9b908cdd-21cf-4f71-8bc7-83db13979563","Type":"ContainerStarted","Data":"2155e1ed214ee97d6299ebe0028b0e8834c814c667d0dcd1fe39ce03a6608b3e"} Dec 05 11:23:45 crc kubenswrapper[5014]: I1205 11:23:45.287432 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" event={"ID":"9b908cdd-21cf-4f71-8bc7-83db13979563","Type":"ContainerStarted","Data":"c6966367d4075c4745d6645eb44c93b9ea4bb0f66a9eafdb8537848283dbc9cf"} Dec 05 11:23:45 crc kubenswrapper[5014]: I1205 11:23:45.314150 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" podStartSLOduration=1.853659774 podStartE2EDuration="2.314130185s" podCreationTimestamp="2025-12-05 11:23:43 +0000 UTC" firstStartedPulling="2025-12-05 11:23:44.219542686 +0000 UTC m=+2151.167660390" lastFinishedPulling="2025-12-05 11:23:44.680013077 +0000 UTC m=+2151.628130801" observedRunningTime="2025-12-05 11:23:45.301602159 +0000 UTC m=+2152.249719883" watchObservedRunningTime="2025-12-05 11:23:45.314130185 +0000 UTC m=+2152.262247889" Dec 05 11:23:55 crc kubenswrapper[5014]: I1205 11:23:55.369515 5014 generic.go:334] "Generic (PLEG): container finished" podID="9b908cdd-21cf-4f71-8bc7-83db13979563" containerID="c6966367d4075c4745d6645eb44c93b9ea4bb0f66a9eafdb8537848283dbc9cf" exitCode=0 Dec 05 11:23:55 crc kubenswrapper[5014]: I1205 11:23:55.369589 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" 
event={"ID":"9b908cdd-21cf-4f71-8bc7-83db13979563","Type":"ContainerDied","Data":"c6966367d4075c4745d6645eb44c93b9ea4bb0f66a9eafdb8537848283dbc9cf"} Dec 05 11:23:56 crc kubenswrapper[5014]: I1205 11:23:56.841673 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.021426 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-ssh-key\") pod \"9b908cdd-21cf-4f71-8bc7-83db13979563\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.021522 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrdsk\" (UniqueName: \"kubernetes.io/projected/9b908cdd-21cf-4f71-8bc7-83db13979563-kube-api-access-wrdsk\") pod \"9b908cdd-21cf-4f71-8bc7-83db13979563\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.021603 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-inventory\") pod \"9b908cdd-21cf-4f71-8bc7-83db13979563\" (UID: \"9b908cdd-21cf-4f71-8bc7-83db13979563\") " Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.027529 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b908cdd-21cf-4f71-8bc7-83db13979563-kube-api-access-wrdsk" (OuterVolumeSpecName: "kube-api-access-wrdsk") pod "9b908cdd-21cf-4f71-8bc7-83db13979563" (UID: "9b908cdd-21cf-4f71-8bc7-83db13979563"). InnerVolumeSpecName "kube-api-access-wrdsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.049214 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-inventory" (OuterVolumeSpecName: "inventory") pod "9b908cdd-21cf-4f71-8bc7-83db13979563" (UID: "9b908cdd-21cf-4f71-8bc7-83db13979563"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.056428 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9b908cdd-21cf-4f71-8bc7-83db13979563" (UID: "9b908cdd-21cf-4f71-8bc7-83db13979563"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.124052 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.124088 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrdsk\" (UniqueName: \"kubernetes.io/projected/9b908cdd-21cf-4f71-8bc7-83db13979563-kube-api-access-wrdsk\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.124103 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b908cdd-21cf-4f71-8bc7-83db13979563-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.391632 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" event={"ID":"9b908cdd-21cf-4f71-8bc7-83db13979563","Type":"ContainerDied","Data":"2155e1ed214ee97d6299ebe0028b0e8834c814c667d0dcd1fe39ce03a6608b3e"} Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.391684 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2155e1ed214ee97d6299ebe0028b0e8834c814c667d0dcd1fe39ce03a6608b3e" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.391791 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.525760 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn"] Dec 05 11:23:57 crc kubenswrapper[5014]: E1205 11:23:57.526479 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b908cdd-21cf-4f71-8bc7-83db13979563" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.526502 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b908cdd-21cf-4f71-8bc7-83db13979563" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.526706 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b908cdd-21cf-4f71-8bc7-83db13979563" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.527399 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.529163 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.530359 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.530600 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.530834 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.530951 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.531105 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.531228 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.531389 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537310 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537374 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537463 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537491 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw2xc\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-kube-api-access-vw2xc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 
11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537518 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537542 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537560 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537585 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537648 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537674 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537726 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537772 5014 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537795 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.537828 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.541570 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn"] Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.639254 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.639359 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.639399 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.639466 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.639485 5014 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-vw2xc\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-kube-api-access-vw2xc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.639982 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.640004 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.640024 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.640044 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.640087 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.640109 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.640143 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: 
\"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.640175 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.640208 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.643088 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.643505 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.643760 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.645786 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.646530 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.646530 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.647021 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.650933 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.651594 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.655875 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.655976 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.656669 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.658977 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.662285 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw2xc\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-kube-api-access-vw2xc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:57 crc kubenswrapper[5014]: I1205 11:23:57.855096 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:23:58 crc kubenswrapper[5014]: I1205 11:23:58.383878 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn"] Dec 05 11:23:58 crc kubenswrapper[5014]: W1205 11:23:58.388518 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf632ba62_c6d0_4229_9d26_cf78c7738723.slice/crio-55f14284da4f288708a39339f79e649029b536260100c79a1cb33e2517fea68a WatchSource:0}: Error finding container 55f14284da4f288708a39339f79e649029b536260100c79a1cb33e2517fea68a: Status 404 returned error can't find the container with id 55f14284da4f288708a39339f79e649029b536260100c79a1cb33e2517fea68a Dec 05 11:23:58 crc kubenswrapper[5014]: I1205 11:23:58.402231 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" event={"ID":"f632ba62-c6d0-4229-9d26-cf78c7738723","Type":"ContainerStarted","Data":"55f14284da4f288708a39339f79e649029b536260100c79a1cb33e2517fea68a"} Dec 05 11:24:01 crc kubenswrapper[5014]: I1205 11:24:01.443514 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" event={"ID":"f632ba62-c6d0-4229-9d26-cf78c7738723","Type":"ContainerStarted","Data":"9e5ef41356cc27a3212fc8367e8eceb498c60489e5656203a454b5ae0d95c3dd"} Dec 05 11:24:01 crc kubenswrapper[5014]: I1205 11:24:01.470849 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" podStartSLOduration=2.5893928600000002 podStartE2EDuration="4.470832942s" podCreationTimestamp="2025-12-05 11:23:57 +0000 UTC" firstStartedPulling="2025-12-05 11:23:58.391513805 +0000 UTC m=+2165.339631509" lastFinishedPulling="2025-12-05 11:24:00.272953887 +0000 UTC m=+2167.221071591" observedRunningTime="2025-12-05 11:24:01.464132658 +0000 UTC m=+2168.412250382" watchObservedRunningTime="2025-12-05 11:24:01.470832942 +0000 UTC m=+2168.418950646" Dec 05 11:24:02 crc kubenswrapper[5014]: I1205 11:24:02.936579 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:24:02 crc kubenswrapper[5014]: I1205 11:24:02.936916 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
Dec 05 11:24:18 crc kubenswrapper[5014]: I1205 11:24:18.840289 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7nqbg"]
Dec 05 11:24:18 crc kubenswrapper[5014]: I1205 11:24:18.842673 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:18 crc kubenswrapper[5014]: I1205 11:24:18.863938 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7nqbg"]
Dec 05 11:24:18 crc kubenswrapper[5014]: I1205 11:24:18.971691 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-catalog-content\") pod \"redhat-marketplace-7nqbg\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") " pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:18 crc kubenswrapper[5014]: I1205 11:24:18.971745 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z68tp\" (UniqueName: \"kubernetes.io/projected/ccd43f72-86d4-4d13-88ba-b54cd342d531-kube-api-access-z68tp\") pod \"redhat-marketplace-7nqbg\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") " pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:18 crc kubenswrapper[5014]: I1205 11:24:18.971854 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-utilities\") pod \"redhat-marketplace-7nqbg\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") " pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:19 crc kubenswrapper[5014]: I1205 11:24:19.074744 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-catalog-content\") pod \"redhat-marketplace-7nqbg\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") " pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:19 crc kubenswrapper[5014]: I1205 11:24:19.074793 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z68tp\" (UniqueName: \"kubernetes.io/projected/ccd43f72-86d4-4d13-88ba-b54cd342d531-kube-api-access-z68tp\") pod \"redhat-marketplace-7nqbg\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") " pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:19 crc kubenswrapper[5014]: I1205 11:24:19.074862 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-utilities\") pod \"redhat-marketplace-7nqbg\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") " pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:19 crc kubenswrapper[5014]: I1205 11:24:19.076113 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-utilities\") pod \"redhat-marketplace-7nqbg\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") " pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:19 crc kubenswrapper[5014]: I1205 11:24:19.076552 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-catalog-content\") pod \"redhat-marketplace-7nqbg\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") " pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:19 crc kubenswrapper[5014]: I1205 11:24:19.095911 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z68tp\" (UniqueName: \"kubernetes.io/projected/ccd43f72-86d4-4d13-88ba-b54cd342d531-kube-api-access-z68tp\") pod \"redhat-marketplace-7nqbg\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") " pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:19 crc kubenswrapper[5014]: I1205 11:24:19.167009 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:19 crc kubenswrapper[5014]: I1205 11:24:19.652227 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7nqbg"]
Dec 05 11:24:20 crc kubenswrapper[5014]: I1205 11:24:20.640199 5014 generic.go:334] "Generic (PLEG): container finished" podID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerID="f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21" exitCode=0
Dec 05 11:24:20 crc kubenswrapper[5014]: I1205 11:24:20.640341 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7nqbg" event={"ID":"ccd43f72-86d4-4d13-88ba-b54cd342d531","Type":"ContainerDied","Data":"f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21"}
Dec 05 11:24:20 crc kubenswrapper[5014]: I1205 11:24:20.640583 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7nqbg" event={"ID":"ccd43f72-86d4-4d13-88ba-b54cd342d531","Type":"ContainerStarted","Data":"af3bfc1c7f9a73e7caaa0c4f9d500d92d7b32473a4a47be390ae45c05dbafe3b"}
Dec 05 11:24:24 crc kubenswrapper[5014]: I1205 11:24:24.680085 5014 generic.go:334] "Generic (PLEG): container finished" podID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerID="edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90" exitCode=0
Dec 05 11:24:24 crc kubenswrapper[5014]: I1205 11:24:24.680363 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7nqbg" event={"ID":"ccd43f72-86d4-4d13-88ba-b54cd342d531","Type":"ContainerDied","Data":"edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90"}
Dec 05 11:24:25 crc kubenswrapper[5014]: I1205 11:24:25.692258 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7nqbg" event={"ID":"ccd43f72-86d4-4d13-88ba-b54cd342d531","Type":"ContainerStarted","Data":"9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026"}
Dec 05 11:24:25 crc kubenswrapper[5014]: I1205 11:24:25.723721 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7nqbg" podStartSLOduration=3.044974984 podStartE2EDuration="7.723696006s" podCreationTimestamp="2025-12-05 11:24:18 +0000 UTC" firstStartedPulling="2025-12-05 11:24:20.642697096 +0000 UTC m=+2187.590814800" lastFinishedPulling="2025-12-05 11:24:25.321418118 +0000 UTC m=+2192.269535822" observedRunningTime="2025-12-05 11:24:25.711821385 +0000 UTC m=+2192.659939099" watchObservedRunningTime="2025-12-05 11:24:25.723696006 +0000 UTC m=+2192.671813720"
Dec 05 11:24:29 crc kubenswrapper[5014]: I1205 11:24:29.167944 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:29 crc kubenswrapper[5014]: I1205 11:24:29.168554 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:29 crc kubenswrapper[5014]: I1205 11:24:29.219501 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:32 crc kubenswrapper[5014]: I1205 11:24:32.937196 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:24:32 crc kubenswrapper[5014]: I1205 11:24:32.938478 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:24:32 crc kubenswrapper[5014]: I1205 11:24:32.938551 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5"
Dec 05 11:24:32 crc kubenswrapper[5014]: I1205 11:24:32.939192 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b1d3a1f29e694656b97d30c4a386bb91494e6280ba532e3e802b9a1c08bf1bf2"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 11:24:32 crc kubenswrapper[5014]: I1205 11:24:32.939257 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://b1d3a1f29e694656b97d30c4a386bb91494e6280ba532e3e802b9a1c08bf1bf2" gracePeriod=600
Dec 05 11:24:33 crc kubenswrapper[5014]: I1205 11:24:33.759205 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="b1d3a1f29e694656b97d30c4a386bb91494e6280ba532e3e802b9a1c08bf1bf2" exitCode=0
Dec 05 11:24:33 crc kubenswrapper[5014]: I1205 11:24:33.759305 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"b1d3a1f29e694656b97d30c4a386bb91494e6280ba532e3e802b9a1c08bf1bf2"}
Dec 05 11:24:33 crc kubenswrapper[5014]: I1205 11:24:33.759890 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a"}
Dec 05 11:24:33 crc kubenswrapper[5014]: I1205 11:24:33.759913 5014 scope.go:117] "RemoveContainer" containerID="382230f38a5a921a381332c615f3d4222697be0a24382dd662fde22cbae28889"
Dec 05 11:24:39 crc kubenswrapper[5014]: I1205 11:24:39.223982 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:39 crc kubenswrapper[5014]: I1205 11:24:39.270312 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7nqbg"]
Dec 05 11:24:39 crc kubenswrapper[5014]: I1205 11:24:39.819102 5014 generic.go:334] "Generic (PLEG): container finished" podID="f632ba62-c6d0-4229-9d26-cf78c7738723" containerID="9e5ef41356cc27a3212fc8367e8eceb498c60489e5656203a454b5ae0d95c3dd" exitCode=0
Dec 05 11:24:39 crc kubenswrapper[5014]: I1205 11:24:39.819208 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" event={"ID":"f632ba62-c6d0-4229-9d26-cf78c7738723","Type":"ContainerDied","Data":"9e5ef41356cc27a3212fc8367e8eceb498c60489e5656203a454b5ae0d95c3dd"}
Dec 05 11:24:39 crc kubenswrapper[5014]: I1205 11:24:39.819338 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7nqbg" podUID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerName="registry-server" containerID="cri-o://9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026" gracePeriod=2
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.810342 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.832264 5014 generic.go:334] "Generic (PLEG): container finished" podID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerID="9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026" exitCode=0
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.832366 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7nqbg" event={"ID":"ccd43f72-86d4-4d13-88ba-b54cd342d531","Type":"ContainerDied","Data":"9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026"}
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.832412 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7nqbg" event={"ID":"ccd43f72-86d4-4d13-88ba-b54cd342d531","Type":"ContainerDied","Data":"af3bfc1c7f9a73e7caaa0c4f9d500d92d7b32473a4a47be390ae45c05dbafe3b"}
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.832430 5014 scope.go:117] "RemoveContainer" containerID="9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.832455 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7nqbg"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.859785 5014 scope.go:117] "RemoveContainer" containerID="edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.898098 5014 scope.go:117] "RemoveContainer" containerID="f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.945454 5014 scope.go:117] "RemoveContainer" containerID="9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026"
Dec 05 11:24:40 crc kubenswrapper[5014]: E1205 11:24:40.945955 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026\": container with ID starting with 9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026 not found: ID does not exist" containerID="9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.945992 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026"} err="failed to get container status \"9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026\": rpc error: code = NotFound desc = could not find container \"9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026\": container with ID starting with 9f28cb546612fd952605cb20f7d5f12cb58717f3d9c508f78203810c53508026 not found: ID does not exist"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.946019 5014 scope.go:117] "RemoveContainer" containerID="edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90"
Dec 05 11:24:40 crc kubenswrapper[5014]: E1205 11:24:40.946365 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90\": container with ID starting with edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90 not found: ID does not exist" containerID="edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.946400 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90"} err="failed to get container status \"edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90\": rpc error: code = NotFound desc = could not find container \"edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90\": container with ID starting with edabd386bab7ea936c2813fc1b8177ee04194ed6e2cab77abd9f73f38c4e3c90 not found: ID does not exist"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.946427 5014 scope.go:117] "RemoveContainer" containerID="f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21"
Dec 05 11:24:40 crc kubenswrapper[5014]: E1205 11:24:40.946812 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21\": container with ID starting with f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21 not found: ID does not exist" containerID="f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21"
Dec 05 11:24:40 crc kubenswrapper[5014]: I1205 11:24:40.946844 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21"} err="failed to get container status \"f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21\": rpc error: code = NotFound desc = could not find container \"f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21\": container with ID starting with f28f0e060013a6ccf5f474fd54046f8ffa8617e6752602fc78438230e18a9b21 not found: ID does not exist"
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.011033 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-catalog-content\") pod \"ccd43f72-86d4-4d13-88ba-b54cd342d531\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.011160 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z68tp\" (UniqueName: \"kubernetes.io/projected/ccd43f72-86d4-4d13-88ba-b54cd342d531-kube-api-access-z68tp\") pod \"ccd43f72-86d4-4d13-88ba-b54cd342d531\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.011224 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-utilities\") pod \"ccd43f72-86d4-4d13-88ba-b54cd342d531\" (UID: \"ccd43f72-86d4-4d13-88ba-b54cd342d531\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.012445 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-utilities" (OuterVolumeSpecName: "utilities") pod "ccd43f72-86d4-4d13-88ba-b54cd342d531" (UID: "ccd43f72-86d4-4d13-88ba-b54cd342d531"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.017934 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccd43f72-86d4-4d13-88ba-b54cd342d531-kube-api-access-z68tp" (OuterVolumeSpecName: "kube-api-access-z68tp") pod "ccd43f72-86d4-4d13-88ba-b54cd342d531" (UID: "ccd43f72-86d4-4d13-88ba-b54cd342d531"). InnerVolumeSpecName "kube-api-access-z68tp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.029356 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ccd43f72-86d4-4d13-88ba-b54cd342d531" (UID: "ccd43f72-86d4-4d13-88ba-b54cd342d531"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.113046 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z68tp\" (UniqueName: \"kubernetes.io/projected/ccd43f72-86d4-4d13-88ba-b54cd342d531-kube-api-access-z68tp\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.113077 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.113086 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd43f72-86d4-4d13-88ba-b54cd342d531-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.259892 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn"
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.278316 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7nqbg"]
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.311424 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7nqbg"]
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.332419 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccd43f72-86d4-4d13-88ba-b54cd342d531" path="/var/lib/kubelet/pods/ccd43f72-86d4-4d13-88ba-b54cd342d531/volumes"
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.353303 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.353403 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-nova-combined-ca-bundle\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.353543 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-neutron-metadata-combined-ca-bundle\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.356633 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-repo-setup-combined-ca-bundle\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.356724 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-inventory\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.356749 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-bootstrap-combined-ca-bundle\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.356797 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-libvirt-combined-ca-bundle\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.356821 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.356844 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ssh-key\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.356961 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-ovn-default-certs-0\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.357002 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-telemetry-combined-ca-bundle\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.357079 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.357121 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw2xc\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-kube-api-access-vw2xc\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.357153 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ovn-combined-ca-bundle\") pod \"f632ba62-c6d0-4229-9d26-cf78c7738723\" (UID: \"f632ba62-c6d0-4229-9d26-cf78c7738723\") "
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.361204 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.364494 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.364837 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.365363 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.374915 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.374977 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.374987 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.374985 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.375450 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.375521 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.376699 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.376772 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-kube-api-access-vw2xc" (OuterVolumeSpecName: "kube-api-access-vw2xc") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "kube-api-access-vw2xc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.402583 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.409930 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-inventory" (OuterVolumeSpecName: "inventory") pod "f632ba62-c6d0-4229-9d26-cf78c7738723" (UID: "f632ba62-c6d0-4229-9d26-cf78c7738723"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461200 5014 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461616 5014 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461636 5014 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461650 5014 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461663 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461674 5014 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461689 5014 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461701 5014 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461715 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461728 5014 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461742 5014 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461755 5014 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName:
\"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461769 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw2xc\" (UniqueName: \"kubernetes.io/projected/f632ba62-c6d0-4229-9d26-cf78c7738723-kube-api-access-vw2xc\") on node \"crc\" DevicePath \"\"" Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.461782 5014 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f632ba62-c6d0-4229-9d26-cf78c7738723-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.847299 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" event={"ID":"f632ba62-c6d0-4229-9d26-cf78c7738723","Type":"ContainerDied","Data":"55f14284da4f288708a39339f79e649029b536260100c79a1cb33e2517fea68a"} Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.847700 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55f14284da4f288708a39339f79e649029b536260100c79a1cb33e2517fea68a" Dec 05 11:24:41 crc kubenswrapper[5014]: I1205 11:24:41.847327 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.047083 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk"] Dec 05 11:24:42 crc kubenswrapper[5014]: E1205 11:24:42.047468 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerName="registry-server" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.047484 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerName="registry-server" Dec 05 11:24:42 crc kubenswrapper[5014]: E1205 11:24:42.047499 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f632ba62-c6d0-4229-9d26-cf78c7738723" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.047506 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f632ba62-c6d0-4229-9d26-cf78c7738723" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 11:24:42 crc kubenswrapper[5014]: E1205 11:24:42.047519 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerName="extract-content" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.047525 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerName="extract-content" Dec 05 11:24:42 crc kubenswrapper[5014]: E1205 11:24:42.047532 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerName="extract-utilities" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.047537 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerName="extract-utilities" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.047709 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd43f72-86d4-4d13-88ba-b54cd342d531" containerName="registry-server" Dec 05 11:24:42 crc 
kubenswrapper[5014]: I1205 11:24:42.047726 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f632ba62-c6d0-4229-9d26-cf78c7738723" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.048334 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.052494 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.052890 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.053018 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.053144 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.053251 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.073380 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk"] Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.175122 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.175539 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxz2s\" (UniqueName: \"kubernetes.io/projected/1d432303-8ec7-44e2-8a87-d5e5c8c59979-kube-api-access-jxz2s\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.175614 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.175687 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.175759 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovn-combined-ca-bundle\") pod 
\"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.277411 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.277472 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.277532 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxz2s\" (UniqueName: \"kubernetes.io/projected/1d432303-8ec7-44e2-8a87-d5e5c8c59979-kube-api-access-jxz2s\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.277562 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.277612 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.278337 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.282467 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.284944 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 
05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.287192 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.306203 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxz2s\" (UniqueName: \"kubernetes.io/projected/1d432303-8ec7-44e2-8a87-d5e5c8c59979-kube-api-access-jxz2s\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dsjzk\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.371480 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:24:42 crc kubenswrapper[5014]: I1205 11:24:42.955441 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk"] Dec 05 11:24:43 crc kubenswrapper[5014]: I1205 11:24:43.874856 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" event={"ID":"1d432303-8ec7-44e2-8a87-d5e5c8c59979","Type":"ContainerStarted","Data":"ab8edd169e834ea913aba65cd318b6829b1f84d0b964977744628eee254874df"} Dec 05 11:24:43 crc kubenswrapper[5014]: I1205 11:24:43.874921 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" event={"ID":"1d432303-8ec7-44e2-8a87-d5e5c8c59979","Type":"ContainerStarted","Data":"0612b1f980299e04158aa6c267ec1f5bd4677acbdf6f1caf056409b8783ca945"} Dec 05 11:24:43 crc kubenswrapper[5014]: I1205 11:24:43.899822 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" podStartSLOduration=1.44743971 podStartE2EDuration="1.899800943s" podCreationTimestamp="2025-12-05 11:24:42 +0000 UTC" firstStartedPulling="2025-12-05 11:24:42.95676066 +0000 UTC m=+2209.904878384" lastFinishedPulling="2025-12-05 11:24:43.409121903 +0000 UTC m=+2210.357239617" observedRunningTime="2025-12-05 11:24:43.891240933 +0000 UTC m=+2210.839358657" watchObservedRunningTime="2025-12-05 11:24:43.899800943 +0000 UTC m=+2210.847918657" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.294532 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-25k7z"] Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.297567 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.303485 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-25k7z"] Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.455236 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpxp5\" (UniqueName: \"kubernetes.io/projected/20684548-2ff4-4e91-b7c0-62bc9e42640d-kube-api-access-mpxp5\") pod \"certified-operators-25k7z\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.455456 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-catalog-content\") pod \"certified-operators-25k7z\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.455559 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-utilities\") pod \"certified-operators-25k7z\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.556756 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-catalog-content\") pod \"certified-operators-25k7z\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.557144 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-utilities\") pod \"certified-operators-25k7z\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.557292 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpxp5\" (UniqueName: \"kubernetes.io/projected/20684548-2ff4-4e91-b7c0-62bc9e42640d-kube-api-access-mpxp5\") pod \"certified-operators-25k7z\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.558224 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-catalog-content\") pod \"certified-operators-25k7z\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.558616 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-utilities\") pod \"certified-operators-25k7z\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.589074 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mpxp5\" (UniqueName: \"kubernetes.io/projected/20684548-2ff4-4e91-b7c0-62bc9e42640d-kube-api-access-mpxp5\") pod \"certified-operators-25k7z\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:44 crc kubenswrapper[5014]: I1205 11:25:44.619797 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:45 crc kubenswrapper[5014]: I1205 11:25:45.122687 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-25k7z"] Dec 05 11:25:45 crc kubenswrapper[5014]: I1205 11:25:45.418514 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-25k7z" event={"ID":"20684548-2ff4-4e91-b7c0-62bc9e42640d","Type":"ContainerStarted","Data":"db56b40ea3d6edcb66e8d95cff71eb873000e4dca2189c47871509da207983ec"} Dec 05 11:25:46 crc kubenswrapper[5014]: I1205 11:25:46.432994 5014 generic.go:334] "Generic (PLEG): container finished" podID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerID="f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b" exitCode=0 Dec 05 11:25:46 crc kubenswrapper[5014]: I1205 11:25:46.433094 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-25k7z" event={"ID":"20684548-2ff4-4e91-b7c0-62bc9e42640d","Type":"ContainerDied","Data":"f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b"} Dec 05 11:25:49 crc kubenswrapper[5014]: I1205 11:25:49.466726 5014 generic.go:334] "Generic (PLEG): container finished" podID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerID="003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b" exitCode=0 Dec 05 11:25:49 crc kubenswrapper[5014]: I1205 11:25:49.466835 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-25k7z" event={"ID":"20684548-2ff4-4e91-b7c0-62bc9e42640d","Type":"ContainerDied","Data":"003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b"} Dec 05 11:25:50 crc kubenswrapper[5014]: I1205 11:25:50.479511 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-25k7z" event={"ID":"20684548-2ff4-4e91-b7c0-62bc9e42640d","Type":"ContainerStarted","Data":"01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354"} Dec 05 11:25:50 crc kubenswrapper[5014]: I1205 11:25:50.506087 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-25k7z" podStartSLOduration=3.082274435 podStartE2EDuration="6.506063506s" podCreationTimestamp="2025-12-05 11:25:44 +0000 UTC" firstStartedPulling="2025-12-05 11:25:46.434899413 +0000 UTC m=+2273.383017117" lastFinishedPulling="2025-12-05 11:25:49.858688484 +0000 UTC m=+2276.806806188" observedRunningTime="2025-12-05 11:25:50.494401691 +0000 UTC m=+2277.442519405" watchObservedRunningTime="2025-12-05 11:25:50.506063506 +0000 UTC m=+2277.454181210" Dec 05 11:25:53 crc kubenswrapper[5014]: I1205 11:25:53.506997 5014 generic.go:334] "Generic (PLEG): container finished" podID="1d432303-8ec7-44e2-8a87-d5e5c8c59979" containerID="ab8edd169e834ea913aba65cd318b6829b1f84d0b964977744628eee254874df" exitCode=0 Dec 05 11:25:53 crc kubenswrapper[5014]: I1205 11:25:53.507107 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" 
event={"ID":"1d432303-8ec7-44e2-8a87-d5e5c8c59979","Type":"ContainerDied","Data":"ab8edd169e834ea913aba65cd318b6829b1f84d0b964977744628eee254874df"} Dec 05 11:25:54 crc kubenswrapper[5014]: I1205 11:25:54.621018 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:54 crc kubenswrapper[5014]: I1205 11:25:54.621358 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:54 crc kubenswrapper[5014]: I1205 11:25:54.743947 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.064406 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.171010 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-inventory\") pod \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.171137 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovn-combined-ca-bundle\") pod \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.171242 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxz2s\" (UniqueName: \"kubernetes.io/projected/1d432303-8ec7-44e2-8a87-d5e5c8c59979-kube-api-access-jxz2s\") pod \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.171263 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovncontroller-config-0\") pod \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.171319 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ssh-key\") pod \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\" (UID: \"1d432303-8ec7-44e2-8a87-d5e5c8c59979\") " Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.177521 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d432303-8ec7-44e2-8a87-d5e5c8c59979-kube-api-access-jxz2s" (OuterVolumeSpecName: "kube-api-access-jxz2s") pod "1d432303-8ec7-44e2-8a87-d5e5c8c59979" (UID: "1d432303-8ec7-44e2-8a87-d5e5c8c59979"). InnerVolumeSpecName "kube-api-access-jxz2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.183182 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "1d432303-8ec7-44e2-8a87-d5e5c8c59979" (UID: "1d432303-8ec7-44e2-8a87-d5e5c8c59979"). 
InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.199169 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-inventory" (OuterVolumeSpecName: "inventory") pod "1d432303-8ec7-44e2-8a87-d5e5c8c59979" (UID: "1d432303-8ec7-44e2-8a87-d5e5c8c59979"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.209416 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1d432303-8ec7-44e2-8a87-d5e5c8c59979" (UID: "1d432303-8ec7-44e2-8a87-d5e5c8c59979"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.212208 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "1d432303-8ec7-44e2-8a87-d5e5c8c59979" (UID: "1d432303-8ec7-44e2-8a87-d5e5c8c59979"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.273571 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxz2s\" (UniqueName: \"kubernetes.io/projected/1d432303-8ec7-44e2-8a87-d5e5c8c59979-kube-api-access-jxz2s\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.273605 5014 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.273619 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.273631 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.273643 5014 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d432303-8ec7-44e2-8a87-d5e5c8c59979-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.526863 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" event={"ID":"1d432303-8ec7-44e2-8a87-d5e5c8c59979","Type":"ContainerDied","Data":"0612b1f980299e04158aa6c267ec1f5bd4677acbdf6f1caf056409b8783ca945"} Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.527913 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0612b1f980299e04158aa6c267ec1f5bd4677acbdf6f1caf056409b8783ca945" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.526962 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dsjzk" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.628498 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.691317 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-25k7z"] Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.733868 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn"] Dec 05 11:25:55 crc kubenswrapper[5014]: E1205 11:25:55.734332 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d432303-8ec7-44e2-8a87-d5e5c8c59979" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.734348 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d432303-8ec7-44e2-8a87-d5e5c8c59979" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.734533 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d432303-8ec7-44e2-8a87-d5e5c8c59979" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.735134 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.737202 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.737738 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.737996 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.738488 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.738723 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.738992 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.747766 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn"] Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.784236 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hrtc\" (UniqueName: \"kubernetes.io/projected/d0637356-0bbd-4cbb-a24b-88a27079fb82-kube-api-access-9hrtc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.784308 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.784418 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.784475 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.784772 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.784892 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.887216 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hrtc\" (UniqueName: \"kubernetes.io/projected/d0637356-0bbd-4cbb-a24b-88a27079fb82-kube-api-access-9hrtc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.887310 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.887413 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.887467 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.887587 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.887646 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.892013 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.892546 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.893524 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.893928 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.894499 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:55 crc kubenswrapper[5014]: I1205 11:25:55.914812 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hrtc\" (UniqueName: \"kubernetes.io/projected/d0637356-0bbd-4cbb-a24b-88a27079fb82-kube-api-access-9hrtc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:56 crc kubenswrapper[5014]: I1205 11:25:56.071795 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:25:56 crc kubenswrapper[5014]: I1205 11:25:56.622772 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn"] Dec 05 11:25:56 crc kubenswrapper[5014]: W1205 11:25:56.628695 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0637356_0bbd_4cbb_a24b_88a27079fb82.slice/crio-ef164622dc46d391cc41c31c00fa42da102dc6882ac4f65d54c79753d8589bcc WatchSource:0}: Error finding container ef164622dc46d391cc41c31c00fa42da102dc6882ac4f65d54c79753d8589bcc: Status 404 returned error can't find the container with id ef164622dc46d391cc41c31c00fa42da102dc6882ac4f65d54c79753d8589bcc Dec 05 11:25:57 crc kubenswrapper[5014]: I1205 11:25:57.545776 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" event={"ID":"d0637356-0bbd-4cbb-a24b-88a27079fb82","Type":"ContainerStarted","Data":"9b602a066b90c37d4074fd9f0f96449d04c6ea257b06201b55f1279c66640228"} Dec 05 11:25:57 crc kubenswrapper[5014]: I1205 11:25:57.546532 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" event={"ID":"d0637356-0bbd-4cbb-a24b-88a27079fb82","Type":"ContainerStarted","Data":"ef164622dc46d391cc41c31c00fa42da102dc6882ac4f65d54c79753d8589bcc"} Dec 05 11:25:57 crc kubenswrapper[5014]: I1205 11:25:57.546011 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-25k7z" podUID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerName="registry-server" containerID="cri-o://01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354" gracePeriod=2 Dec 05 11:25:57 crc kubenswrapper[5014]: I1205 11:25:57.572348 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" podStartSLOduration=2.177329137 podStartE2EDuration="2.572329777s" podCreationTimestamp="2025-12-05 11:25:55 +0000 UTC" firstStartedPulling="2025-12-05 11:25:56.631158841 +0000 UTC m=+2283.579276545" lastFinishedPulling="2025-12-05 11:25:57.026159481 +0000 UTC m=+2283.974277185" observedRunningTime="2025-12-05 11:25:57.562309763 +0000 UTC m=+2284.510427467" watchObservedRunningTime="2025-12-05 11:25:57.572329777 +0000 UTC m=+2284.520447481" Dec 05 11:25:57 crc kubenswrapper[5014]: I1205 11:25:57.957198 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.055722 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-utilities\") pod \"20684548-2ff4-4e91-b7c0-62bc9e42640d\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.055824 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpxp5\" (UniqueName: \"kubernetes.io/projected/20684548-2ff4-4e91-b7c0-62bc9e42640d-kube-api-access-mpxp5\") pod \"20684548-2ff4-4e91-b7c0-62bc9e42640d\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.055932 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-catalog-content\") pod \"20684548-2ff4-4e91-b7c0-62bc9e42640d\" (UID: \"20684548-2ff4-4e91-b7c0-62bc9e42640d\") " Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.060848 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-utilities" (OuterVolumeSpecName: "utilities") pod "20684548-2ff4-4e91-b7c0-62bc9e42640d" (UID: "20684548-2ff4-4e91-b7c0-62bc9e42640d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.067853 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20684548-2ff4-4e91-b7c0-62bc9e42640d-kube-api-access-mpxp5" (OuterVolumeSpecName: "kube-api-access-mpxp5") pod "20684548-2ff4-4e91-b7c0-62bc9e42640d" (UID: "20684548-2ff4-4e91-b7c0-62bc9e42640d"). InnerVolumeSpecName "kube-api-access-mpxp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.129238 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "20684548-2ff4-4e91-b7c0-62bc9e42640d" (UID: "20684548-2ff4-4e91-b7c0-62bc9e42640d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.158222 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.158336 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpxp5\" (UniqueName: \"kubernetes.io/projected/20684548-2ff4-4e91-b7c0-62bc9e42640d-kube-api-access-mpxp5\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.158354 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20684548-2ff4-4e91-b7c0-62bc9e42640d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.561959 5014 generic.go:334] "Generic (PLEG): container finished" podID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerID="01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354" exitCode=0 Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.562399 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-25k7z" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.562179 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-25k7z" event={"ID":"20684548-2ff4-4e91-b7c0-62bc9e42640d","Type":"ContainerDied","Data":"01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354"} Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.562582 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-25k7z" event={"ID":"20684548-2ff4-4e91-b7c0-62bc9e42640d","Type":"ContainerDied","Data":"db56b40ea3d6edcb66e8d95cff71eb873000e4dca2189c47871509da207983ec"} Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.562631 5014 scope.go:117] "RemoveContainer" containerID="01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.600178 5014 scope.go:117] "RemoveContainer" containerID="003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.607783 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-25k7z"] Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.625583 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-25k7z"] Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.634676 5014 scope.go:117] "RemoveContainer" containerID="f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.665313 5014 scope.go:117] "RemoveContainer" containerID="01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354" Dec 05 11:25:58 crc kubenswrapper[5014]: E1205 11:25:58.665788 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354\": container with ID starting with 01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354 not found: ID does not exist" containerID="01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.665837 
5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354"} err="failed to get container status \"01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354\": rpc error: code = NotFound desc = could not find container \"01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354\": container with ID starting with 01a6ac12b4ff45dd4a9fa4566cebd9a7d2ba9118dd5e94021746d70ab6672354 not found: ID does not exist" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.665869 5014 scope.go:117] "RemoveContainer" containerID="003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b" Dec 05 11:25:58 crc kubenswrapper[5014]: E1205 11:25:58.666383 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b\": container with ID starting with 003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b not found: ID does not exist" containerID="003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.666415 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b"} err="failed to get container status \"003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b\": rpc error: code = NotFound desc = could not find container \"003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b\": container with ID starting with 003a76db181ff1d77075cbaed990ec5ffeb9596d620e23dae65da1f1ab44737b not found: ID does not exist" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.666437 5014 scope.go:117] "RemoveContainer" containerID="f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b" Dec 05 11:25:58 crc kubenswrapper[5014]: E1205 11:25:58.666854 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b\": container with ID starting with f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b not found: ID does not exist" containerID="f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b" Dec 05 11:25:58 crc kubenswrapper[5014]: I1205 11:25:58.666873 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b"} err="failed to get container status \"f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b\": rpc error: code = NotFound desc = could not find container \"f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b\": container with ID starting with f186071d548f17bbb2ce58e34ce201a68022f79a1237a4c12aba9bf8b4ea2d9b not found: ID does not exist" Dec 05 11:25:59 crc kubenswrapper[5014]: I1205 11:25:59.328291 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20684548-2ff4-4e91-b7c0-62bc9e42640d" path="/var/lib/kubelet/pods/20684548-2ff4-4e91-b7c0-62bc9e42640d/volumes" Dec 05 11:26:49 crc kubenswrapper[5014]: I1205 11:26:49.041513 5014 generic.go:334] "Generic (PLEG): container finished" podID="d0637356-0bbd-4cbb-a24b-88a27079fb82" containerID="9b602a066b90c37d4074fd9f0f96449d04c6ea257b06201b55f1279c66640228" exitCode=0 Dec 05 11:26:49 crc kubenswrapper[5014]: 
I1205 11:26:49.041610 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" event={"ID":"d0637356-0bbd-4cbb-a24b-88a27079fb82","Type":"ContainerDied","Data":"9b602a066b90c37d4074fd9f0f96449d04c6ea257b06201b55f1279c66640228"} Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.497373 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.622130 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-metadata-combined-ca-bundle\") pod \"d0637356-0bbd-4cbb-a24b-88a27079fb82\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.622519 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-inventory\") pod \"d0637356-0bbd-4cbb-a24b-88a27079fb82\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.622753 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-ovn-metadata-agent-neutron-config-0\") pod \"d0637356-0bbd-4cbb-a24b-88a27079fb82\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.622835 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-ssh-key\") pod \"d0637356-0bbd-4cbb-a24b-88a27079fb82\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.622891 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hrtc\" (UniqueName: \"kubernetes.io/projected/d0637356-0bbd-4cbb-a24b-88a27079fb82-kube-api-access-9hrtc\") pod \"d0637356-0bbd-4cbb-a24b-88a27079fb82\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.623091 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-nova-metadata-neutron-config-0\") pod \"d0637356-0bbd-4cbb-a24b-88a27079fb82\" (UID: \"d0637356-0bbd-4cbb-a24b-88a27079fb82\") " Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.627496 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "d0637356-0bbd-4cbb-a24b-88a27079fb82" (UID: "d0637356-0bbd-4cbb-a24b-88a27079fb82"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.634325 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0637356-0bbd-4cbb-a24b-88a27079fb82-kube-api-access-9hrtc" (OuterVolumeSpecName: "kube-api-access-9hrtc") pod "d0637356-0bbd-4cbb-a24b-88a27079fb82" (UID: "d0637356-0bbd-4cbb-a24b-88a27079fb82"). InnerVolumeSpecName "kube-api-access-9hrtc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.649545 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "d0637356-0bbd-4cbb-a24b-88a27079fb82" (UID: "d0637356-0bbd-4cbb-a24b-88a27079fb82"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.651959 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "d0637356-0bbd-4cbb-a24b-88a27079fb82" (UID: "d0637356-0bbd-4cbb-a24b-88a27079fb82"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.655374 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d0637356-0bbd-4cbb-a24b-88a27079fb82" (UID: "d0637356-0bbd-4cbb-a24b-88a27079fb82"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.655823 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-inventory" (OuterVolumeSpecName: "inventory") pod "d0637356-0bbd-4cbb-a24b-88a27079fb82" (UID: "d0637356-0bbd-4cbb-a24b-88a27079fb82"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.726026 5014 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.726079 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.726097 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hrtc\" (UniqueName: \"kubernetes.io/projected/d0637356-0bbd-4cbb-a24b-88a27079fb82-kube-api-access-9hrtc\") on node \"crc\" DevicePath \"\"" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.726110 5014 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.726122 5014 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:26:50 crc kubenswrapper[5014]: I1205 11:26:50.726136 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0637356-0bbd-4cbb-a24b-88a27079fb82-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.060400 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" event={"ID":"d0637356-0bbd-4cbb-a24b-88a27079fb82","Type":"ContainerDied","Data":"ef164622dc46d391cc41c31c00fa42da102dc6882ac4f65d54c79753d8589bcc"} Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.060453 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef164622dc46d391cc41c31c00fa42da102dc6882ac4f65d54c79753d8589bcc" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.060461 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.154860 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h"] Dec 05 11:26:51 crc kubenswrapper[5014]: E1205 11:26:51.155712 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerName="extract-content" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.155743 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerName="extract-content" Dec 05 11:26:51 crc kubenswrapper[5014]: E1205 11:26:51.155792 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0637356-0bbd-4cbb-a24b-88a27079fb82" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.155807 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0637356-0bbd-4cbb-a24b-88a27079fb82" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 11:26:51 crc kubenswrapper[5014]: E1205 11:26:51.155835 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerName="registry-server" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.155849 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerName="registry-server" Dec 05 11:26:51 crc kubenswrapper[5014]: E1205 11:26:51.155900 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerName="extract-utilities" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.155912 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerName="extract-utilities" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.156224 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="20684548-2ff4-4e91-b7c0-62bc9e42640d" containerName="registry-server" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.156259 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0637356-0bbd-4cbb-a24b-88a27079fb82" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.157071 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.166037 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.166126 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.166144 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.166202 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.166394 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.174151 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h"] Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.233912 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.233986 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxs9d\" (UniqueName: \"kubernetes.io/projected/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-kube-api-access-lxs9d\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.234320 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.234399 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.234591 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.336046 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.336149 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.336176 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxs9d\" (UniqueName: \"kubernetes.io/projected/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-kube-api-access-lxs9d\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.336308 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.336342 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.341858 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.342094 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.343193 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.343744 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.363859 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxs9d\" (UniqueName: \"kubernetes.io/projected/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-kube-api-access-lxs9d\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:51 crc kubenswrapper[5014]: I1205 11:26:51.480916 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:26:52 crc kubenswrapper[5014]: I1205 11:26:52.028451 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h"] Dec 05 11:26:52 crc kubenswrapper[5014]: I1205 11:26:52.090555 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" event={"ID":"fab44f82-d30a-4bb9-b416-5ff67a5f55b6","Type":"ContainerStarted","Data":"60413c8acb28fa78086da8551a97e96eee565d020b31a7cb7bf9ec16548f305e"} Dec 05 11:26:53 crc kubenswrapper[5014]: I1205 11:26:53.101601 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" event={"ID":"fab44f82-d30a-4bb9-b416-5ff67a5f55b6","Type":"ContainerStarted","Data":"9b9accb5375db4453f4434021fe72788d646608167a6543bbe0e3f9381653a4f"} Dec 05 11:26:53 crc kubenswrapper[5014]: I1205 11:26:53.129939 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" podStartSLOduration=1.695520584 podStartE2EDuration="2.129922158s" podCreationTimestamp="2025-12-05 11:26:51 +0000 UTC" firstStartedPulling="2025-12-05 11:26:52.039443209 +0000 UTC m=+2338.987560913" lastFinishedPulling="2025-12-05 11:26:52.473844773 +0000 UTC m=+2339.421962487" observedRunningTime="2025-12-05 11:26:53.123677306 +0000 UTC m=+2340.071795030" watchObservedRunningTime="2025-12-05 11:26:53.129922158 +0000 UTC m=+2340.078039862" Dec 05 11:27:02 crc kubenswrapper[5014]: I1205 11:27:02.936872 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:27:02 crc kubenswrapper[5014]: I1205 11:27:02.937542 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.504168 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lv9kk"] Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.508540 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.513344 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lv9kk"] Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.669824 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74x85\" (UniqueName: \"kubernetes.io/projected/66f164cd-a4d2-42cd-baa0-68403ac8a720-kube-api-access-74x85\") pod \"community-operators-lv9kk\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.670237 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-utilities\") pod \"community-operators-lv9kk\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.670457 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-catalog-content\") pod \"community-operators-lv9kk\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.771993 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74x85\" (UniqueName: \"kubernetes.io/projected/66f164cd-a4d2-42cd-baa0-68403ac8a720-kube-api-access-74x85\") pod \"community-operators-lv9kk\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.772143 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-utilities\") pod \"community-operators-lv9kk\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.772189 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-catalog-content\") pod \"community-operators-lv9kk\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.772784 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-catalog-content\") pod \"community-operators-lv9kk\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.772824 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-utilities\") pod \"community-operators-lv9kk\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.790981 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-74x85\" (UniqueName: \"kubernetes.io/projected/66f164cd-a4d2-42cd-baa0-68403ac8a720-kube-api-access-74x85\") pod \"community-operators-lv9kk\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:17 crc kubenswrapper[5014]: I1205 11:27:17.829333 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:18 crc kubenswrapper[5014]: I1205 11:27:18.316437 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lv9kk"] Dec 05 11:27:18 crc kubenswrapper[5014]: I1205 11:27:18.349146 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lv9kk" event={"ID":"66f164cd-a4d2-42cd-baa0-68403ac8a720","Type":"ContainerStarted","Data":"1397d8d234404b5f0449def3ab8d554703baf32785d44e311655d031e0cebca4"} Dec 05 11:27:19 crc kubenswrapper[5014]: I1205 11:27:19.360156 5014 generic.go:334] "Generic (PLEG): container finished" podID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerID="7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592" exitCode=0 Dec 05 11:27:19 crc kubenswrapper[5014]: I1205 11:27:19.360334 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lv9kk" event={"ID":"66f164cd-a4d2-42cd-baa0-68403ac8a720","Type":"ContainerDied","Data":"7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592"} Dec 05 11:27:21 crc kubenswrapper[5014]: I1205 11:27:21.377370 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lv9kk" event={"ID":"66f164cd-a4d2-42cd-baa0-68403ac8a720","Type":"ContainerStarted","Data":"f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83"} Dec 05 11:27:22 crc kubenswrapper[5014]: I1205 11:27:22.390503 5014 generic.go:334] "Generic (PLEG): container finished" podID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerID="f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83" exitCode=0 Dec 05 11:27:22 crc kubenswrapper[5014]: I1205 11:27:22.390556 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lv9kk" event={"ID":"66f164cd-a4d2-42cd-baa0-68403ac8a720","Type":"ContainerDied","Data":"f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83"} Dec 05 11:27:23 crc kubenswrapper[5014]: I1205 11:27:23.406156 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lv9kk" event={"ID":"66f164cd-a4d2-42cd-baa0-68403ac8a720","Type":"ContainerStarted","Data":"53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732"} Dec 05 11:27:23 crc kubenswrapper[5014]: I1205 11:27:23.427907 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lv9kk" podStartSLOduration=2.990548758 podStartE2EDuration="6.427892572s" podCreationTimestamp="2025-12-05 11:27:17 +0000 UTC" firstStartedPulling="2025-12-05 11:27:19.362418896 +0000 UTC m=+2366.310536600" lastFinishedPulling="2025-12-05 11:27:22.7997627 +0000 UTC m=+2369.747880414" observedRunningTime="2025-12-05 11:27:23.42331664 +0000 UTC m=+2370.371434354" watchObservedRunningTime="2025-12-05 11:27:23.427892572 +0000 UTC m=+2370.376010276" Dec 05 11:27:27 crc kubenswrapper[5014]: I1205 11:27:27.829945 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:27 crc kubenswrapper[5014]: I1205 11:27:27.830547 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:27 crc kubenswrapper[5014]: I1205 11:27:27.884140 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:28 crc kubenswrapper[5014]: I1205 11:27:28.507212 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:28 crc kubenswrapper[5014]: I1205 11:27:28.558777 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lv9kk"] Dec 05 11:27:30 crc kubenswrapper[5014]: I1205 11:27:30.470867 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lv9kk" podUID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerName="registry-server" containerID="cri-o://53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732" gracePeriod=2 Dec 05 11:27:30 crc kubenswrapper[5014]: I1205 11:27:30.868030 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:30 crc kubenswrapper[5014]: I1205 11:27:30.969567 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-catalog-content\") pod \"66f164cd-a4d2-42cd-baa0-68403ac8a720\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " Dec 05 11:27:30 crc kubenswrapper[5014]: I1205 11:27:30.969848 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74x85\" (UniqueName: \"kubernetes.io/projected/66f164cd-a4d2-42cd-baa0-68403ac8a720-kube-api-access-74x85\") pod \"66f164cd-a4d2-42cd-baa0-68403ac8a720\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " Dec 05 11:27:30 crc kubenswrapper[5014]: I1205 11:27:30.969902 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-utilities\") pod \"66f164cd-a4d2-42cd-baa0-68403ac8a720\" (UID: \"66f164cd-a4d2-42cd-baa0-68403ac8a720\") " Dec 05 11:27:30 crc kubenswrapper[5014]: I1205 11:27:30.971267 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-utilities" (OuterVolumeSpecName: "utilities") pod "66f164cd-a4d2-42cd-baa0-68403ac8a720" (UID: "66f164cd-a4d2-42cd-baa0-68403ac8a720"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:27:30 crc kubenswrapper[5014]: I1205 11:27:30.978633 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66f164cd-a4d2-42cd-baa0-68403ac8a720-kube-api-access-74x85" (OuterVolumeSpecName: "kube-api-access-74x85") pod "66f164cd-a4d2-42cd-baa0-68403ac8a720" (UID: "66f164cd-a4d2-42cd-baa0-68403ac8a720"). InnerVolumeSpecName "kube-api-access-74x85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.026078 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66f164cd-a4d2-42cd-baa0-68403ac8a720" (UID: "66f164cd-a4d2-42cd-baa0-68403ac8a720"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.072773 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.072808 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74x85\" (UniqueName: \"kubernetes.io/projected/66f164cd-a4d2-42cd-baa0-68403ac8a720-kube-api-access-74x85\") on node \"crc\" DevicePath \"\"" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.072819 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f164cd-a4d2-42cd-baa0-68403ac8a720-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.499429 5014 generic.go:334] "Generic (PLEG): container finished" podID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerID="53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732" exitCode=0 Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.499528 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lv9kk" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.499510 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lv9kk" event={"ID":"66f164cd-a4d2-42cd-baa0-68403ac8a720","Type":"ContainerDied","Data":"53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732"} Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.499691 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lv9kk" event={"ID":"66f164cd-a4d2-42cd-baa0-68403ac8a720","Type":"ContainerDied","Data":"1397d8d234404b5f0449def3ab8d554703baf32785d44e311655d031e0cebca4"} Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.499719 5014 scope.go:117] "RemoveContainer" containerID="53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.526425 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lv9kk"] Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.531727 5014 scope.go:117] "RemoveContainer" containerID="f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.539884 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lv9kk"] Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.559289 5014 scope.go:117] "RemoveContainer" containerID="7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.593215 5014 scope.go:117] "RemoveContainer" containerID="53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732" Dec 05 11:27:31 crc kubenswrapper[5014]: E1205 11:27:31.593665 5014 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732\": container with ID starting with 53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732 not found: ID does not exist" containerID="53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.593696 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732"} err="failed to get container status \"53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732\": rpc error: code = NotFound desc = could not find container \"53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732\": container with ID starting with 53f24a6db1fecac5ea50874cc335fd15c88ac32a890aae1288cb35c3ec096732 not found: ID does not exist" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.593716 5014 scope.go:117] "RemoveContainer" containerID="f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83" Dec 05 11:27:31 crc kubenswrapper[5014]: E1205 11:27:31.594208 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83\": container with ID starting with f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83 not found: ID does not exist" containerID="f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.594258 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83"} err="failed to get container status \"f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83\": rpc error: code = NotFound desc = could not find container \"f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83\": container with ID starting with f2900a59fd88fc219219b21d4c952125c7326b8c08b1320f94e1ae20bbf83e83 not found: ID does not exist" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.594349 5014 scope.go:117] "RemoveContainer" containerID="7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592" Dec 05 11:27:31 crc kubenswrapper[5014]: E1205 11:27:31.594810 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592\": container with ID starting with 7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592 not found: ID does not exist" containerID="7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592" Dec 05 11:27:31 crc kubenswrapper[5014]: I1205 11:27:31.594870 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592"} err="failed to get container status \"7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592\": rpc error: code = NotFound desc = could not find container \"7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592\": container with ID starting with 7bc5f338d527644a91cb1fb5dded6f9404cec5c2eac7e11c8ef673f900f0c592 not found: ID does not exist" Dec 05 11:27:32 crc kubenswrapper[5014]: I1205 11:27:32.937463 5014 patch_prober.go:28] interesting 
pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:27:32 crc kubenswrapper[5014]: I1205 11:27:32.937886 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:27:33 crc kubenswrapper[5014]: I1205 11:27:33.329767 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66f164cd-a4d2-42cd-baa0-68403ac8a720" path="/var/lib/kubelet/pods/66f164cd-a4d2-42cd-baa0-68403ac8a720/volumes" Dec 05 11:28:02 crc kubenswrapper[5014]: I1205 11:28:02.937105 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:28:02 crc kubenswrapper[5014]: I1205 11:28:02.937747 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:28:02 crc kubenswrapper[5014]: I1205 11:28:02.937796 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 11:28:02 crc kubenswrapper[5014]: I1205 11:28:02.938641 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:28:02 crc kubenswrapper[5014]: I1205 11:28:02.938733 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" gracePeriod=600 Dec 05 11:28:03 crc kubenswrapper[5014]: E1205 11:28:03.104797 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:28:03 crc kubenswrapper[5014]: I1205 11:28:03.836097 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" exitCode=0 Dec 05 11:28:03 crc kubenswrapper[5014]: I1205 11:28:03.836146 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a"} Dec 05 11:28:03 crc kubenswrapper[5014]: I1205 11:28:03.836188 5014 scope.go:117] "RemoveContainer" containerID="b1d3a1f29e694656b97d30c4a386bb91494e6280ba532e3e802b9a1c08bf1bf2" Dec 05 11:28:03 crc kubenswrapper[5014]: I1205 11:28:03.836876 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:28:03 crc kubenswrapper[5014]: E1205 11:28:03.837216 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:28:18 crc kubenswrapper[5014]: I1205 11:28:18.318332 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:28:18 crc kubenswrapper[5014]: E1205 11:28:18.319090 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:28:31 crc kubenswrapper[5014]: I1205 11:28:31.318083 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:28:31 crc kubenswrapper[5014]: E1205 11:28:31.318906 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:28:44 crc kubenswrapper[5014]: I1205 11:28:44.318698 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:28:44 crc kubenswrapper[5014]: E1205 11:28:44.319509 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:28:56 crc kubenswrapper[5014]: I1205 11:28:56.318398 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:28:56 crc kubenswrapper[5014]: E1205 11:28:56.319159 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:29:07 crc kubenswrapper[5014]: I1205 11:29:07.319630 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:29:07 crc kubenswrapper[5014]: E1205 11:29:07.320543 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:29:20 crc kubenswrapper[5014]: I1205 11:29:20.319151 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:29:20 crc kubenswrapper[5014]: E1205 11:29:20.320203 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:29:34 crc kubenswrapper[5014]: I1205 11:29:34.318746 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:29:34 crc kubenswrapper[5014]: E1205 11:29:34.319510 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:29:45 crc kubenswrapper[5014]: I1205 11:29:45.317853 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:29:45 crc kubenswrapper[5014]: E1205 11:29:45.318582 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:29:57 crc kubenswrapper[5014]: I1205 11:29:57.319299 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:29:57 crc kubenswrapper[5014]: E1205 11:29:57.320310 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" 
podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.150343 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx"] Dec 05 11:30:00 crc kubenswrapper[5014]: E1205 11:30:00.150980 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerName="extract-utilities" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.150993 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerName="extract-utilities" Dec 05 11:30:00 crc kubenswrapper[5014]: E1205 11:30:00.151021 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerName="extract-content" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.151027 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerName="extract-content" Dec 05 11:30:00 crc kubenswrapper[5014]: E1205 11:30:00.151036 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerName="registry-server" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.151042 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerName="registry-server" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.151289 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="66f164cd-a4d2-42cd-baa0-68403ac8a720" containerName="registry-server" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.151917 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.153892 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.154530 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.162819 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx"] Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.318330 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-config-volume\") pod \"collect-profiles-29415570-4jfbx\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.318478 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-secret-volume\") pod \"collect-profiles-29415570-4jfbx\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.318537 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wc4h\" (UniqueName: 
\"kubernetes.io/projected/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-kube-api-access-2wc4h\") pod \"collect-profiles-29415570-4jfbx\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.419601 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-secret-volume\") pod \"collect-profiles-29415570-4jfbx\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.419683 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wc4h\" (UniqueName: \"kubernetes.io/projected/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-kube-api-access-2wc4h\") pod \"collect-profiles-29415570-4jfbx\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.419776 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-config-volume\") pod \"collect-profiles-29415570-4jfbx\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.420701 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-config-volume\") pod \"collect-profiles-29415570-4jfbx\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.426449 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-secret-volume\") pod \"collect-profiles-29415570-4jfbx\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.436421 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wc4h\" (UniqueName: \"kubernetes.io/projected/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-kube-api-access-2wc4h\") pod \"collect-profiles-29415570-4jfbx\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.483413 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:00 crc kubenswrapper[5014]: I1205 11:30:00.935101 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx"] Dec 05 11:30:00 crc kubenswrapper[5014]: W1205 11:30:00.939067 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc54a5f3_91d6_4d0f_94d8_5d246f8f1af3.slice/crio-4f24e110b7aaf5de099540350d764b09be312e4b52e7ca14790321170981c009 WatchSource:0}: Error finding container 4f24e110b7aaf5de099540350d764b09be312e4b52e7ca14790321170981c009: Status 404 returned error can't find the container with id 4f24e110b7aaf5de099540350d764b09be312e4b52e7ca14790321170981c009 Dec 05 11:30:01 crc kubenswrapper[5014]: I1205 11:30:01.006130 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" event={"ID":"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3","Type":"ContainerStarted","Data":"4f24e110b7aaf5de099540350d764b09be312e4b52e7ca14790321170981c009"} Dec 05 11:30:02 crc kubenswrapper[5014]: I1205 11:30:02.018754 5014 generic.go:334] "Generic (PLEG): container finished" podID="cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3" containerID="26e7267a0b9fb2d7bc020da8cdcf4269a927c9903c9f6b4ca8269bb14bd6af12" exitCode=0 Dec 05 11:30:02 crc kubenswrapper[5014]: I1205 11:30:02.018875 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" event={"ID":"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3","Type":"ContainerDied","Data":"26e7267a0b9fb2d7bc020da8cdcf4269a927c9903c9f6b4ca8269bb14bd6af12"} Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.381333 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.478324 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wc4h\" (UniqueName: \"kubernetes.io/projected/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-kube-api-access-2wc4h\") pod \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.478363 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-config-volume\") pod \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.478430 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-secret-volume\") pod \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\" (UID: \"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3\") " Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.479400 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-config-volume" (OuterVolumeSpecName: "config-volume") pod "cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3" (UID: "cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.485124 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-kube-api-access-2wc4h" (OuterVolumeSpecName: "kube-api-access-2wc4h") pod "cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3" (UID: "cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3"). InnerVolumeSpecName "kube-api-access-2wc4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.485424 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3" (UID: "cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.580356 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wc4h\" (UniqueName: \"kubernetes.io/projected/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-kube-api-access-2wc4h\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.580665 5014 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:03 crc kubenswrapper[5014]: I1205 11:30:03.580742 5014 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 11:30:04 crc kubenswrapper[5014]: I1205 11:30:04.041809 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" event={"ID":"cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3","Type":"ContainerDied","Data":"4f24e110b7aaf5de099540350d764b09be312e4b52e7ca14790321170981c009"} Dec 05 11:30:04 crc kubenswrapper[5014]: I1205 11:30:04.041852 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415570-4jfbx" Dec 05 11:30:04 crc kubenswrapper[5014]: I1205 11:30:04.041861 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f24e110b7aaf5de099540350d764b09be312e4b52e7ca14790321170981c009" Dec 05 11:30:04 crc kubenswrapper[5014]: I1205 11:30:04.473150 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2"] Dec 05 11:30:04 crc kubenswrapper[5014]: I1205 11:30:04.485841 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415525-jdvh2"] Dec 05 11:30:05 crc kubenswrapper[5014]: I1205 11:30:05.332332 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9aa2370d-27db-4547-95f3-f09274275737" path="/var/lib/kubelet/pods/9aa2370d-27db-4547-95f3-f09274275737/volumes" Dec 05 11:30:05 crc kubenswrapper[5014]: I1205 11:30:05.913023 5014 scope.go:117] "RemoveContainer" containerID="d5fe98dca49cf644fd6c63dd35510af43dbf99c23a5ce6d8cb103e3566f01b18" Dec 05 11:30:10 crc kubenswrapper[5014]: I1205 11:30:10.318627 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:30:10 crc kubenswrapper[5014]: E1205 11:30:10.319745 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:30:22 crc kubenswrapper[5014]: I1205 11:30:22.319856 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:30:22 crc kubenswrapper[5014]: E1205 11:30:22.321205 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:30:35 crc kubenswrapper[5014]: I1205 11:30:35.320209 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:30:35 crc kubenswrapper[5014]: E1205 11:30:35.320889 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:30:50 crc kubenswrapper[5014]: I1205 11:30:50.319227 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:30:50 crc kubenswrapper[5014]: E1205 11:30:50.320079 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:31:04 crc kubenswrapper[5014]: I1205 11:31:04.318582 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:31:04 crc kubenswrapper[5014]: E1205 11:31:04.319337 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:31:19 crc kubenswrapper[5014]: I1205 11:31:19.317924 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:31:19 crc kubenswrapper[5014]: E1205 11:31:19.318785 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:31:33 crc kubenswrapper[5014]: I1205 11:31:33.025332 5014 generic.go:334] "Generic (PLEG): container finished" podID="fab44f82-d30a-4bb9-b416-5ff67a5f55b6" containerID="9b9accb5375db4453f4434021fe72788d646608167a6543bbe0e3f9381653a4f" exitCode=0 Dec 05 11:31:33 crc kubenswrapper[5014]: I1205 11:31:33.025409 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" event={"ID":"fab44f82-d30a-4bb9-b416-5ff67a5f55b6","Type":"ContainerDied","Data":"9b9accb5375db4453f4434021fe72788d646608167a6543bbe0e3f9381653a4f"} Dec 05 11:31:33 crc kubenswrapper[5014]: I1205 11:31:33.327107 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:31:33 crc kubenswrapper[5014]: E1205 11:31:33.327427 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.531852 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.722941 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-secret-0\") pod \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.723001 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxs9d\" (UniqueName: \"kubernetes.io/projected/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-kube-api-access-lxs9d\") pod \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.723241 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-inventory\") pod \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.723291 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-ssh-key\") pod \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.723326 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-combined-ca-bundle\") pod \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\" (UID: \"fab44f82-d30a-4bb9-b416-5ff67a5f55b6\") " Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.729558 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-kube-api-access-lxs9d" (OuterVolumeSpecName: "kube-api-access-lxs9d") pod "fab44f82-d30a-4bb9-b416-5ff67a5f55b6" (UID: "fab44f82-d30a-4bb9-b416-5ff67a5f55b6"). InnerVolumeSpecName "kube-api-access-lxs9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.730637 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "fab44f82-d30a-4bb9-b416-5ff67a5f55b6" (UID: "fab44f82-d30a-4bb9-b416-5ff67a5f55b6"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.767500 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fab44f82-d30a-4bb9-b416-5ff67a5f55b6" (UID: "fab44f82-d30a-4bb9-b416-5ff67a5f55b6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.774933 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "fab44f82-d30a-4bb9-b416-5ff67a5f55b6" (UID: "fab44f82-d30a-4bb9-b416-5ff67a5f55b6"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.778329 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-inventory" (OuterVolumeSpecName: "inventory") pod "fab44f82-d30a-4bb9-b416-5ff67a5f55b6" (UID: "fab44f82-d30a-4bb9-b416-5ff67a5f55b6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.826509 5014 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.826538 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxs9d\" (UniqueName: \"kubernetes.io/projected/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-kube-api-access-lxs9d\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.826552 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.826562 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:34 crc kubenswrapper[5014]: I1205 11:31:34.826571 5014 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fab44f82-d30a-4bb9-b416-5ff67a5f55b6-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.057439 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" event={"ID":"fab44f82-d30a-4bb9-b416-5ff67a5f55b6","Type":"ContainerDied","Data":"60413c8acb28fa78086da8551a97e96eee565d020b31a7cb7bf9ec16548f305e"} Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.057494 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60413c8acb28fa78086da8551a97e96eee565d020b31a7cb7bf9ec16548f305e" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.057520 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.147793 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx"] Dec 05 11:31:35 crc kubenswrapper[5014]: E1205 11:31:35.148865 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fab44f82-d30a-4bb9-b416-5ff67a5f55b6" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.149008 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="fab44f82-d30a-4bb9-b416-5ff67a5f55b6" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 11:31:35 crc kubenswrapper[5014]: E1205 11:31:35.149118 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3" containerName="collect-profiles" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.149215 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3" containerName="collect-profiles" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.149543 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc54a5f3-91d6-4d0f-94d8-5d246f8f1af3" containerName="collect-profiles" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.149635 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="fab44f82-d30a-4bb9-b416-5ff67a5f55b6" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.150390 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.153513 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.153937 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.154228 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.154594 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.154856 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.155094 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.155114 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.167192 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx"] Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.235997 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjtzm\" (UniqueName: \"kubernetes.io/projected/1ef403f3-902c-41ac-874b-25627e6b5637-kube-api-access-kjtzm\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") 
" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.236073 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.236121 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.236257 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1ef403f3-902c-41ac-874b-25627e6b5637-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.236339 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.236391 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.236426 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.236475 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.236589 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-inventory\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.338641 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1ef403f3-902c-41ac-874b-25627e6b5637-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.339099 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.339176 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.339215 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.339298 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.339424 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.339636 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1ef403f3-902c-41ac-874b-25627e6b5637-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.340912 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjtzm\" (UniqueName: \"kubernetes.io/projected/1ef403f3-902c-41ac-874b-25627e6b5637-kube-api-access-kjtzm\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: 
\"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.340977 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.341025 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.344500 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.345261 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.345414 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.347510 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.347880 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.348218 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 
11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.350316 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.365113 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjtzm\" (UniqueName: \"kubernetes.io/projected/1ef403f3-902c-41ac-874b-25627e6b5637-kube-api-access-kjtzm\") pod \"nova-edpm-deployment-openstack-edpm-ipam-hw7cx\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:35 crc kubenswrapper[5014]: I1205 11:31:35.475785 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:31:36 crc kubenswrapper[5014]: I1205 11:31:36.037993 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx"] Dec 05 11:31:36 crc kubenswrapper[5014]: I1205 11:31:36.044046 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:31:36 crc kubenswrapper[5014]: I1205 11:31:36.067447 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" event={"ID":"1ef403f3-902c-41ac-874b-25627e6b5637","Type":"ContainerStarted","Data":"54ef8cf5989f6ef0967ea80ef8ad89ef6c95d34f31492c689769c0604f4b36ef"} Dec 05 11:31:37 crc kubenswrapper[5014]: I1205 11:31:37.078478 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" event={"ID":"1ef403f3-902c-41ac-874b-25627e6b5637","Type":"ContainerStarted","Data":"7540f9ea50308493c3fd206b89af4a7e73561fc763f526f05073ff17d0058b6d"} Dec 05 11:31:37 crc kubenswrapper[5014]: I1205 11:31:37.098545 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" podStartSLOduration=1.607363623 podStartE2EDuration="2.098525001s" podCreationTimestamp="2025-12-05 11:31:35 +0000 UTC" firstStartedPulling="2025-12-05 11:31:36.043757714 +0000 UTC m=+2622.991875418" lastFinishedPulling="2025-12-05 11:31:36.534919092 +0000 UTC m=+2623.483036796" observedRunningTime="2025-12-05 11:31:37.096987232 +0000 UTC m=+2624.045104946" watchObservedRunningTime="2025-12-05 11:31:37.098525001 +0000 UTC m=+2624.046642715" Dec 05 11:31:46 crc kubenswrapper[5014]: I1205 11:31:46.318487 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:31:46 crc kubenswrapper[5014]: E1205 11:31:46.319491 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:31:58 crc kubenswrapper[5014]: I1205 11:31:58.318589 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:31:58 crc 
kubenswrapper[5014]: E1205 11:31:58.319419 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:32:09 crc kubenswrapper[5014]: I1205 11:32:09.318937 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:32:09 crc kubenswrapper[5014]: E1205 11:32:09.320313 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:32:22 crc kubenswrapper[5014]: I1205 11:32:22.319060 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:32:22 crc kubenswrapper[5014]: E1205 11:32:22.320030 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:32:37 crc kubenswrapper[5014]: I1205 11:32:37.318582 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:32:37 crc kubenswrapper[5014]: E1205 11:32:37.320258 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.551953 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5b6kp"] Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.554493 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.566677 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5b6kp"] Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.649543 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-utilities\") pod \"redhat-operators-5b6kp\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.649720 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gspvx\" (UniqueName: \"kubernetes.io/projected/bb81cacb-9fd1-4525-a199-a348966adc74-kube-api-access-gspvx\") pod \"redhat-operators-5b6kp\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.649759 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-catalog-content\") pod \"redhat-operators-5b6kp\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.753727 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gspvx\" (UniqueName: \"kubernetes.io/projected/bb81cacb-9fd1-4525-a199-a348966adc74-kube-api-access-gspvx\") pod \"redhat-operators-5b6kp\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.753816 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-catalog-content\") pod \"redhat-operators-5b6kp\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.753969 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-utilities\") pod \"redhat-operators-5b6kp\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.754652 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-utilities\") pod \"redhat-operators-5b6kp\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.755437 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-catalog-content\") pod \"redhat-operators-5b6kp\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.773537 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gspvx\" (UniqueName: \"kubernetes.io/projected/bb81cacb-9fd1-4525-a199-a348966adc74-kube-api-access-gspvx\") pod \"redhat-operators-5b6kp\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:44 crc kubenswrapper[5014]: I1205 11:32:44.875131 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:45 crc kubenswrapper[5014]: I1205 11:32:45.411356 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5b6kp"] Dec 05 11:32:45 crc kubenswrapper[5014]: I1205 11:32:45.940454 5014 generic.go:334] "Generic (PLEG): container finished" podID="bb81cacb-9fd1-4525-a199-a348966adc74" containerID="48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39" exitCode=0 Dec 05 11:32:45 crc kubenswrapper[5014]: I1205 11:32:45.940672 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b6kp" event={"ID":"bb81cacb-9fd1-4525-a199-a348966adc74","Type":"ContainerDied","Data":"48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39"} Dec 05 11:32:45 crc kubenswrapper[5014]: I1205 11:32:45.940793 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b6kp" event={"ID":"bb81cacb-9fd1-4525-a199-a348966adc74","Type":"ContainerStarted","Data":"5651bf3382b5bdb5051fdfb6d88040882d7ff0aa21504780f324c5c83e274819"} Dec 05 11:32:46 crc kubenswrapper[5014]: I1205 11:32:46.951538 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b6kp" event={"ID":"bb81cacb-9fd1-4525-a199-a348966adc74","Type":"ContainerStarted","Data":"409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc"} Dec 05 11:32:49 crc kubenswrapper[5014]: I1205 11:32:49.978729 5014 generic.go:334] "Generic (PLEG): container finished" podID="bb81cacb-9fd1-4525-a199-a348966adc74" containerID="409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc" exitCode=0 Dec 05 11:32:49 crc kubenswrapper[5014]: I1205 11:32:49.978809 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b6kp" event={"ID":"bb81cacb-9fd1-4525-a199-a348966adc74","Type":"ContainerDied","Data":"409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc"} Dec 05 11:32:50 crc kubenswrapper[5014]: I1205 11:32:50.318417 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:32:50 crc kubenswrapper[5014]: E1205 11:32:50.318714 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:32:52 crc kubenswrapper[5014]: I1205 11:32:52.008408 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b6kp" event={"ID":"bb81cacb-9fd1-4525-a199-a348966adc74","Type":"ContainerStarted","Data":"700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84"} Dec 05 11:32:52 crc kubenswrapper[5014]: I1205 11:32:52.031367 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-5b6kp" podStartSLOduration=2.993894892 podStartE2EDuration="8.031348633s" podCreationTimestamp="2025-12-05 11:32:44 +0000 UTC" firstStartedPulling="2025-12-05 11:32:45.942885838 +0000 UTC m=+2692.891003542" lastFinishedPulling="2025-12-05 11:32:50.980339589 +0000 UTC m=+2697.928457283" observedRunningTime="2025-12-05 11:32:52.027938198 +0000 UTC m=+2698.976055922" watchObservedRunningTime="2025-12-05 11:32:52.031348633 +0000 UTC m=+2698.979466337" Dec 05 11:32:54 crc kubenswrapper[5014]: I1205 11:32:54.875312 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:54 crc kubenswrapper[5014]: I1205 11:32:54.875627 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:32:55 crc kubenswrapper[5014]: I1205 11:32:55.923193 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5b6kp" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" containerName="registry-server" probeResult="failure" output=< Dec 05 11:32:55 crc kubenswrapper[5014]: timeout: failed to connect service ":50051" within 1s Dec 05 11:32:55 crc kubenswrapper[5014]: > Dec 05 11:33:01 crc kubenswrapper[5014]: I1205 11:33:01.318396 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:33:01 crc kubenswrapper[5014]: E1205 11:33:01.318966 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:33:04 crc kubenswrapper[5014]: I1205 11:33:04.934101 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:33:04 crc kubenswrapper[5014]: I1205 11:33:04.980988 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:33:05 crc kubenswrapper[5014]: I1205 11:33:05.180630 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5b6kp"] Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.124983 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5b6kp" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" containerName="registry-server" containerID="cri-o://700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84" gracePeriod=2 Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.658995 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.769406 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gspvx\" (UniqueName: \"kubernetes.io/projected/bb81cacb-9fd1-4525-a199-a348966adc74-kube-api-access-gspvx\") pod \"bb81cacb-9fd1-4525-a199-a348966adc74\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.769666 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-catalog-content\") pod \"bb81cacb-9fd1-4525-a199-a348966adc74\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.769725 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-utilities\") pod \"bb81cacb-9fd1-4525-a199-a348966adc74\" (UID: \"bb81cacb-9fd1-4525-a199-a348966adc74\") " Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.771076 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-utilities" (OuterVolumeSpecName: "utilities") pod "bb81cacb-9fd1-4525-a199-a348966adc74" (UID: "bb81cacb-9fd1-4525-a199-a348966adc74"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.775384 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb81cacb-9fd1-4525-a199-a348966adc74-kube-api-access-gspvx" (OuterVolumeSpecName: "kube-api-access-gspvx") pod "bb81cacb-9fd1-4525-a199-a348966adc74" (UID: "bb81cacb-9fd1-4525-a199-a348966adc74"). InnerVolumeSpecName "kube-api-access-gspvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.871946 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gspvx\" (UniqueName: \"kubernetes.io/projected/bb81cacb-9fd1-4525-a199-a348966adc74-kube-api-access-gspvx\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.871987 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.894213 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb81cacb-9fd1-4525-a199-a348966adc74" (UID: "bb81cacb-9fd1-4525-a199-a348966adc74"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:33:06 crc kubenswrapper[5014]: I1205 11:33:06.973230 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb81cacb-9fd1-4525-a199-a348966adc74-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.138220 5014 generic.go:334] "Generic (PLEG): container finished" podID="bb81cacb-9fd1-4525-a199-a348966adc74" containerID="700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84" exitCode=0 Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.138283 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b6kp" event={"ID":"bb81cacb-9fd1-4525-a199-a348966adc74","Type":"ContainerDied","Data":"700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84"} Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.138320 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b6kp" event={"ID":"bb81cacb-9fd1-4525-a199-a348966adc74","Type":"ContainerDied","Data":"5651bf3382b5bdb5051fdfb6d88040882d7ff0aa21504780f324c5c83e274819"} Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.138342 5014 scope.go:117] "RemoveContainer" containerID="700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.138365 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5b6kp" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.162480 5014 scope.go:117] "RemoveContainer" containerID="409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.186390 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5b6kp"] Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.189740 5014 scope.go:117] "RemoveContainer" containerID="48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.195124 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5b6kp"] Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.245522 5014 scope.go:117] "RemoveContainer" containerID="700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84" Dec 05 11:33:07 crc kubenswrapper[5014]: E1205 11:33:07.246057 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84\": container with ID starting with 700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84 not found: ID does not exist" containerID="700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.246130 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84"} err="failed to get container status \"700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84\": rpc error: code = NotFound desc = could not find container \"700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84\": container with ID starting with 700d4447a9c0cc1d04b2f27861485dce3211306c02a2d07a9e073013a22c4f84 not found: ID does not exist" Dec 05 11:33:07 crc 
kubenswrapper[5014]: I1205 11:33:07.246171 5014 scope.go:117] "RemoveContainer" containerID="409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc" Dec 05 11:33:07 crc kubenswrapper[5014]: E1205 11:33:07.246873 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc\": container with ID starting with 409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc not found: ID does not exist" containerID="409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.246925 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc"} err="failed to get container status \"409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc\": rpc error: code = NotFound desc = could not find container \"409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc\": container with ID starting with 409357e364d42b96f2a8d6ef1dd1dea878e46f9e9090dff1cbe0106d8b3f85fc not found: ID does not exist" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.246954 5014 scope.go:117] "RemoveContainer" containerID="48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39" Dec 05 11:33:07 crc kubenswrapper[5014]: E1205 11:33:07.247203 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39\": container with ID starting with 48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39 not found: ID does not exist" containerID="48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.247243 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39"} err="failed to get container status \"48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39\": rpc error: code = NotFound desc = could not find container \"48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39\": container with ID starting with 48ec10397fa21927a5d887f1a7b6e6e79480898ad75334d4dd44d382167b9b39 not found: ID does not exist" Dec 05 11:33:07 crc kubenswrapper[5014]: I1205 11:33:07.329613 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" path="/var/lib/kubelet/pods/bb81cacb-9fd1-4525-a199-a348966adc74/volumes" Dec 05 11:33:16 crc kubenswrapper[5014]: I1205 11:33:16.318571 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:33:17 crc kubenswrapper[5014]: I1205 11:33:17.252753 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"7c4baa164edbcc345074a71078d1ad8ee11a17f3ce646c791c35806062fcaebb"} Dec 05 11:34:24 crc kubenswrapper[5014]: I1205 11:34:24.895609 5014 generic.go:334] "Generic (PLEG): container finished" podID="1ef403f3-902c-41ac-874b-25627e6b5637" containerID="7540f9ea50308493c3fd206b89af4a7e73561fc763f526f05073ff17d0058b6d" exitCode=0 Dec 05 11:34:24 crc kubenswrapper[5014]: I1205 11:34:24.895711 5014 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" event={"ID":"1ef403f3-902c-41ac-874b-25627e6b5637","Type":"ContainerDied","Data":"7540f9ea50308493c3fd206b89af4a7e73561fc763f526f05073ff17d0058b6d"} Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.346527 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.499421 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-combined-ca-bundle\") pod \"1ef403f3-902c-41ac-874b-25627e6b5637\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.500061 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-0\") pod \"1ef403f3-902c-41ac-874b-25627e6b5637\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.500156 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1ef403f3-902c-41ac-874b-25627e6b5637-nova-extra-config-0\") pod \"1ef403f3-902c-41ac-874b-25627e6b5637\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.500235 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-ssh-key\") pod \"1ef403f3-902c-41ac-874b-25627e6b5637\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.500383 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-0\") pod \"1ef403f3-902c-41ac-874b-25627e6b5637\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.500478 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-1\") pod \"1ef403f3-902c-41ac-874b-25627e6b5637\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.500863 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjtzm\" (UniqueName: \"kubernetes.io/projected/1ef403f3-902c-41ac-874b-25627e6b5637-kube-api-access-kjtzm\") pod \"1ef403f3-902c-41ac-874b-25627e6b5637\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.500979 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-inventory\") pod \"1ef403f3-902c-41ac-874b-25627e6b5637\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.501095 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" 
(UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-1\") pod \"1ef403f3-902c-41ac-874b-25627e6b5637\" (UID: \"1ef403f3-902c-41ac-874b-25627e6b5637\") " Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.506907 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ef403f3-902c-41ac-874b-25627e6b5637-kube-api-access-kjtzm" (OuterVolumeSpecName: "kube-api-access-kjtzm") pod "1ef403f3-902c-41ac-874b-25627e6b5637" (UID: "1ef403f3-902c-41ac-874b-25627e6b5637"). InnerVolumeSpecName "kube-api-access-kjtzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.508608 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "1ef403f3-902c-41ac-874b-25627e6b5637" (UID: "1ef403f3-902c-41ac-874b-25627e6b5637"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.573429 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "1ef403f3-902c-41ac-874b-25627e6b5637" (UID: "1ef403f3-902c-41ac-874b-25627e6b5637"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.577379 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1ef403f3-902c-41ac-874b-25627e6b5637" (UID: "1ef403f3-902c-41ac-874b-25627e6b5637"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.605185 5014 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.605222 5014 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.605234 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.605247 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjtzm\" (UniqueName: \"kubernetes.io/projected/1ef403f3-902c-41ac-874b-25627e6b5637-kube-api-access-kjtzm\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.617831 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "1ef403f3-902c-41ac-874b-25627e6b5637" (UID: "1ef403f3-902c-41ac-874b-25627e6b5637"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.621562 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-inventory" (OuterVolumeSpecName: "inventory") pod "1ef403f3-902c-41ac-874b-25627e6b5637" (UID: "1ef403f3-902c-41ac-874b-25627e6b5637"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.624422 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "1ef403f3-902c-41ac-874b-25627e6b5637" (UID: "1ef403f3-902c-41ac-874b-25627e6b5637"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.660953 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ef403f3-902c-41ac-874b-25627e6b5637-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "1ef403f3-902c-41ac-874b-25627e6b5637" (UID: "1ef403f3-902c-41ac-874b-25627e6b5637"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.661240 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "1ef403f3-902c-41ac-874b-25627e6b5637" (UID: "1ef403f3-902c-41ac-874b-25627e6b5637"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.707336 5014 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1ef403f3-902c-41ac-874b-25627e6b5637-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.707811 5014 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.707830 5014 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.707840 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.707849 5014 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1ef403f3-902c-41ac-874b-25627e6b5637-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.915055 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" event={"ID":"1ef403f3-902c-41ac-874b-25627e6b5637","Type":"ContainerDied","Data":"54ef8cf5989f6ef0967ea80ef8ad89ef6c95d34f31492c689769c0604f4b36ef"} Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.915094 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54ef8cf5989f6ef0967ea80ef8ad89ef6c95d34f31492c689769c0604f4b36ef" Dec 05 11:34:26 crc kubenswrapper[5014]: I1205 11:34:26.915152 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-hw7cx" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.024978 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6"] Dec 05 11:34:27 crc kubenswrapper[5014]: E1205 11:34:27.025529 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" containerName="extract-content" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.025553 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" containerName="extract-content" Dec 05 11:34:27 crc kubenswrapper[5014]: E1205 11:34:27.025567 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ef403f3-902c-41ac-874b-25627e6b5637" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.025575 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ef403f3-902c-41ac-874b-25627e6b5637" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 11:34:27 crc kubenswrapper[5014]: E1205 11:34:27.025594 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" containerName="extract-utilities" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.025604 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" containerName="extract-utilities" Dec 05 11:34:27 crc kubenswrapper[5014]: E1205 11:34:27.025623 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" containerName="registry-server" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.025631 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" containerName="registry-server" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.025881 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ef403f3-902c-41ac-874b-25627e6b5637" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.025917 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb81cacb-9fd1-4525-a199-a348966adc74" containerName="registry-server" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.026924 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.029475 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.030173 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-kxqpf" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.030402 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.030905 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.031179 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.032817 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6"] Dec 05 11:34:27 crc kubenswrapper[5014]: E1205 11:34:27.139847 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ef403f3_902c_41ac_874b_25627e6b5637.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ef403f3_902c_41ac_874b_25627e6b5637.slice/crio-54ef8cf5989f6ef0967ea80ef8ad89ef6c95d34f31492c689769c0604f4b36ef\": RecentStats: unable to find data in memory cache]" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.214866 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.215227 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.215319 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgfz8\" (UniqueName: \"kubernetes.io/projected/68801bb8-5aae-4367-9c85-a1c139ab1844-kube-api-access-hgfz8\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.215341 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.215384 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.215451 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.215501 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.317877 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgfz8\" (UniqueName: \"kubernetes.io/projected/68801bb8-5aae-4367-9c85-a1c139ab1844-kube-api-access-hgfz8\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.317996 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.318087 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.318194 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.318293 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-2\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.318345 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.318406 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.324823 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.325047 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.326554 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.329860 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.335704 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.339509 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.340766 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgfz8\" (UniqueName: \"kubernetes.io/projected/68801bb8-5aae-4367-9c85-a1c139ab1844-kube-api-access-hgfz8\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.343550 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.888478 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6"] Dec 05 11:34:27 crc kubenswrapper[5014]: I1205 11:34:27.927009 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" event={"ID":"68801bb8-5aae-4367-9c85-a1c139ab1844","Type":"ContainerStarted","Data":"8bbf9f60769c510b407573d0c7714ce14cb75d7d273d0791dee512be5d980eff"} Dec 05 11:34:28 crc kubenswrapper[5014]: I1205 11:34:28.938696 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" event={"ID":"68801bb8-5aae-4367-9c85-a1c139ab1844","Type":"ContainerStarted","Data":"782b186d9c0643f602e7cce1883b86a340fad05ce3e49d4886c1393ae60046a6"} Dec 05 11:34:28 crc kubenswrapper[5014]: I1205 11:34:28.971159 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" podStartSLOduration=1.493052708 podStartE2EDuration="1.971131455s" podCreationTimestamp="2025-12-05 11:34:27 +0000 UTC" firstStartedPulling="2025-12-05 11:34:27.897422924 +0000 UTC m=+2794.845540628" lastFinishedPulling="2025-12-05 11:34:28.375501671 +0000 UTC m=+2795.323619375" observedRunningTime="2025-12-05 11:34:28.959032848 +0000 UTC m=+2795.907150572" watchObservedRunningTime="2025-12-05 11:34:28.971131455 +0000 UTC m=+2795.919249169" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.558872 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zj7m7"] Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.561145 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.583051 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zj7m7"] Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.680424 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hdkb\" (UniqueName: \"kubernetes.io/projected/83cbcedb-43f2-4a1f-aaeb-25a276eeb253-kube-api-access-6hdkb\") pod \"redhat-marketplace-zj7m7\" (UID: \"83cbcedb-43f2-4a1f-aaeb-25a276eeb253\") " pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.680600 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83cbcedb-43f2-4a1f-aaeb-25a276eeb253-catalog-content\") pod \"redhat-marketplace-zj7m7\" (UID: \"83cbcedb-43f2-4a1f-aaeb-25a276eeb253\") " pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.680902 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83cbcedb-43f2-4a1f-aaeb-25a276eeb253-utilities\") pod \"redhat-marketplace-zj7m7\" (UID: \"83cbcedb-43f2-4a1f-aaeb-25a276eeb253\") " pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.782179 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83cbcedb-43f2-4a1f-aaeb-25a276eeb253-catalog-content\") pod \"redhat-marketplace-zj7m7\" (UID: \"83cbcedb-43f2-4a1f-aaeb-25a276eeb253\") " pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.782301 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83cbcedb-43f2-4a1f-aaeb-25a276eeb253-utilities\") pod \"redhat-marketplace-zj7m7\" (UID: \"83cbcedb-43f2-4a1f-aaeb-25a276eeb253\") " pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.782335 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hdkb\" (UniqueName: \"kubernetes.io/projected/83cbcedb-43f2-4a1f-aaeb-25a276eeb253-kube-api-access-6hdkb\") pod \"redhat-marketplace-zj7m7\" (UID: \"83cbcedb-43f2-4a1f-aaeb-25a276eeb253\") " pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.782886 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83cbcedb-43f2-4a1f-aaeb-25a276eeb253-catalog-content\") pod \"redhat-marketplace-zj7m7\" (UID: \"83cbcedb-43f2-4a1f-aaeb-25a276eeb253\") " pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.782974 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83cbcedb-43f2-4a1f-aaeb-25a276eeb253-utilities\") pod \"redhat-marketplace-zj7m7\" (UID: \"83cbcedb-43f2-4a1f-aaeb-25a276eeb253\") " pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.809149 5014 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6hdkb\" (UniqueName: \"kubernetes.io/projected/83cbcedb-43f2-4a1f-aaeb-25a276eeb253-kube-api-access-6hdkb\") pod \"redhat-marketplace-zj7m7\" (UID: \"83cbcedb-43f2-4a1f-aaeb-25a276eeb253\") " pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:30 crc kubenswrapper[5014]: I1205 11:34:30.889002 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:31 crc kubenswrapper[5014]: I1205 11:34:31.473539 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zj7m7"] Dec 05 11:34:31 crc kubenswrapper[5014]: I1205 11:34:31.965018 5014 generic.go:334] "Generic (PLEG): container finished" podID="83cbcedb-43f2-4a1f-aaeb-25a276eeb253" containerID="2cede0799f426795f33ca50e6e5024e05be79abe8b1d0490ca5edfe4429628bf" exitCode=0 Dec 05 11:34:31 crc kubenswrapper[5014]: I1205 11:34:31.965104 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj7m7" event={"ID":"83cbcedb-43f2-4a1f-aaeb-25a276eeb253","Type":"ContainerDied","Data":"2cede0799f426795f33ca50e6e5024e05be79abe8b1d0490ca5edfe4429628bf"} Dec 05 11:34:31 crc kubenswrapper[5014]: I1205 11:34:31.965318 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj7m7" event={"ID":"83cbcedb-43f2-4a1f-aaeb-25a276eeb253","Type":"ContainerStarted","Data":"0db8cfdb5ffb5abca3035d269e95bffd37ade0e93e33c456968c137e6b83e8d1"} Dec 05 11:34:34 crc kubenswrapper[5014]: I1205 11:34:34.993389 5014 generic.go:334] "Generic (PLEG): container finished" podID="83cbcedb-43f2-4a1f-aaeb-25a276eeb253" containerID="91c8aea351de9003b92e70a14352687e90d35fa0a50701daa5f1fec591d4a8f2" exitCode=0 Dec 05 11:34:34 crc kubenswrapper[5014]: I1205 11:34:34.993438 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj7m7" event={"ID":"83cbcedb-43f2-4a1f-aaeb-25a276eeb253","Type":"ContainerDied","Data":"91c8aea351de9003b92e70a14352687e90d35fa0a50701daa5f1fec591d4a8f2"} Dec 05 11:34:36 crc kubenswrapper[5014]: I1205 11:34:36.011391 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj7m7" event={"ID":"83cbcedb-43f2-4a1f-aaeb-25a276eeb253","Type":"ContainerStarted","Data":"c6d1aa6df137faa7045fe93cd29279222750af224e6bba169caa4f8c4bfaabd3"} Dec 05 11:34:36 crc kubenswrapper[5014]: I1205 11:34:36.032229 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zj7m7" podStartSLOduration=2.321614127 podStartE2EDuration="6.032208776s" podCreationTimestamp="2025-12-05 11:34:30 +0000 UTC" firstStartedPulling="2025-12-05 11:34:31.967076716 +0000 UTC m=+2798.915194420" lastFinishedPulling="2025-12-05 11:34:35.677671365 +0000 UTC m=+2802.625789069" observedRunningTime="2025-12-05 11:34:36.027366088 +0000 UTC m=+2802.975483812" watchObservedRunningTime="2025-12-05 11:34:36.032208776 +0000 UTC m=+2802.980326500" Dec 05 11:34:40 crc kubenswrapper[5014]: I1205 11:34:40.890170 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:40 crc kubenswrapper[5014]: I1205 11:34:40.890781 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:40 crc kubenswrapper[5014]: I1205 11:34:40.939440 5014 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:41 crc kubenswrapper[5014]: I1205 11:34:41.095489 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zj7m7" Dec 05 11:34:41 crc kubenswrapper[5014]: I1205 11:34:41.717555 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zj7m7"] Dec 05 11:34:41 crc kubenswrapper[5014]: I1205 11:34:41.790196 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2mqd"] Dec 05 11:34:41 crc kubenswrapper[5014]: I1205 11:34:41.790574 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n2mqd" podUID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerName="registry-server" containerID="cri-o://5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d" gracePeriod=2 Dec 05 11:34:42 crc kubenswrapper[5014]: I1205 11:34:42.743223 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 11:34:42 crc kubenswrapper[5014]: I1205 11:34:42.918728 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnmtt\" (UniqueName: \"kubernetes.io/projected/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-kube-api-access-wnmtt\") pod \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " Dec 05 11:34:42 crc kubenswrapper[5014]: I1205 11:34:42.918804 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-utilities\") pod \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " Dec 05 11:34:42 crc kubenswrapper[5014]: I1205 11:34:42.918841 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-catalog-content\") pod \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\" (UID: \"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd\") " Dec 05 11:34:42 crc kubenswrapper[5014]: I1205 11:34:42.919594 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-utilities" (OuterVolumeSpecName: "utilities") pod "1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" (UID: "1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:34:42 crc kubenswrapper[5014]: I1205 11:34:42.927544 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-kube-api-access-wnmtt" (OuterVolumeSpecName: "kube-api-access-wnmtt") pod "1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" (UID: "1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd"). InnerVolumeSpecName "kube-api-access-wnmtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:34:42 crc kubenswrapper[5014]: I1205 11:34:42.936537 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" (UID: "1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.020839 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnmtt\" (UniqueName: \"kubernetes.io/projected/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-kube-api-access-wnmtt\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.020877 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.020886 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.067912 5014 generic.go:334] "Generic (PLEG): container finished" podID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerID="5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d" exitCode=0 Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.067951 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2mqd" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.068000 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2mqd" event={"ID":"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd","Type":"ContainerDied","Data":"5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d"} Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.068066 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2mqd" event={"ID":"1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd","Type":"ContainerDied","Data":"1988e72f09613b4abf8b485951fa38a1987ed0c82620bdf275d03e9972707425"} Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.068090 5014 scope.go:117] "RemoveContainer" containerID="5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.098891 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2mqd"] Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.099153 5014 scope.go:117] "RemoveContainer" containerID="f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.106990 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2mqd"] Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.119705 5014 scope.go:117] "RemoveContainer" containerID="dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.158777 5014 scope.go:117] "RemoveContainer" containerID="5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d" Dec 05 11:34:43 crc kubenswrapper[5014]: E1205 11:34:43.159521 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d\": container with ID starting with 5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d not found: ID does not exist" containerID="5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.159556 5014 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d"} err="failed to get container status \"5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d\": rpc error: code = NotFound desc = could not find container \"5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d\": container with ID starting with 5c0cad36f51907f6566dad0e30dc45911edbef20d9e428effa88160d71aca68d not found: ID does not exist" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.159579 5014 scope.go:117] "RemoveContainer" containerID="f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938" Dec 05 11:34:43 crc kubenswrapper[5014]: E1205 11:34:43.159835 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938\": container with ID starting with f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938 not found: ID does not exist" containerID="f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.159876 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938"} err="failed to get container status \"f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938\": rpc error: code = NotFound desc = could not find container \"f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938\": container with ID starting with f1df69fd4f10ab14e574802acdf310d9ac4b81bcf3672f4f4ff9f47d5a7ec938 not found: ID does not exist" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.159896 5014 scope.go:117] "RemoveContainer" containerID="dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6" Dec 05 11:34:43 crc kubenswrapper[5014]: E1205 11:34:43.160159 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6\": container with ID starting with dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6 not found: ID does not exist" containerID="dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.160182 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6"} err="failed to get container status \"dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6\": rpc error: code = NotFound desc = could not find container \"dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6\": container with ID starting with dcaa49999ac083ba5b4dd74a667272f3aba884cec53f55981ceb63e84a385fa6 not found: ID does not exist" Dec 05 11:34:43 crc kubenswrapper[5014]: I1205 11:34:43.330816 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" path="/var/lib/kubelet/pods/1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd/volumes" Dec 05 11:35:32 crc kubenswrapper[5014]: I1205 11:35:32.936570 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:35:32 crc kubenswrapper[5014]: I1205 11:35:32.936980 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:36:02 crc kubenswrapper[5014]: I1205 11:36:02.937371 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:36:02 crc kubenswrapper[5014]: I1205 11:36:02.937957 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.388805 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l72rm"] Dec 05 11:36:26 crc kubenswrapper[5014]: E1205 11:36:26.389808 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerName="extract-content" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.389825 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerName="extract-content" Dec 05 11:36:26 crc kubenswrapper[5014]: E1205 11:36:26.389857 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerName="registry-server" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.389865 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerName="registry-server" Dec 05 11:36:26 crc kubenswrapper[5014]: E1205 11:36:26.389903 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerName="extract-utilities" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.389911 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerName="extract-utilities" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.390142 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b4aa1ae-2e33-4ac0-b782-6f24111bc9dd" containerName="registry-server" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.391700 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.402496 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l72rm"] Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.580061 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrrq2\" (UniqueName: \"kubernetes.io/projected/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-kube-api-access-lrrq2\") pod \"certified-operators-l72rm\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.580129 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-catalog-content\") pod \"certified-operators-l72rm\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.581162 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-utilities\") pod \"certified-operators-l72rm\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.682960 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-utilities\") pod \"certified-operators-l72rm\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.683264 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrrq2\" (UniqueName: \"kubernetes.io/projected/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-kube-api-access-lrrq2\") pod \"certified-operators-l72rm\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.683314 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-catalog-content\") pod \"certified-operators-l72rm\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.683555 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-utilities\") pod \"certified-operators-l72rm\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.683753 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-catalog-content\") pod \"certified-operators-l72rm\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.705112 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lrrq2\" (UniqueName: \"kubernetes.io/projected/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-kube-api-access-lrrq2\") pod \"certified-operators-l72rm\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:26 crc kubenswrapper[5014]: I1205 11:36:26.715393 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:27 crc kubenswrapper[5014]: I1205 11:36:27.188435 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l72rm"] Dec 05 11:36:28 crc kubenswrapper[5014]: I1205 11:36:28.091160 5014 generic.go:334] "Generic (PLEG): container finished" podID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerID="eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355" exitCode=0 Dec 05 11:36:28 crc kubenswrapper[5014]: I1205 11:36:28.091459 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l72rm" event={"ID":"7a23aac7-7c7f-4d77-b68b-0d421f8978ca","Type":"ContainerDied","Data":"eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355"} Dec 05 11:36:28 crc kubenswrapper[5014]: I1205 11:36:28.092530 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l72rm" event={"ID":"7a23aac7-7c7f-4d77-b68b-0d421f8978ca","Type":"ContainerStarted","Data":"939b18d4f9529a1984db6e12d9f6c66ee55f2033c5c0e20d2ea3f5619aa152de"} Dec 05 11:36:30 crc kubenswrapper[5014]: I1205 11:36:30.113360 5014 generic.go:334] "Generic (PLEG): container finished" podID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerID="439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d" exitCode=0 Dec 05 11:36:30 crc kubenswrapper[5014]: I1205 11:36:30.113884 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l72rm" event={"ID":"7a23aac7-7c7f-4d77-b68b-0d421f8978ca","Type":"ContainerDied","Data":"439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d"} Dec 05 11:36:32 crc kubenswrapper[5014]: I1205 11:36:32.135024 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l72rm" event={"ID":"7a23aac7-7c7f-4d77-b68b-0d421f8978ca","Type":"ContainerStarted","Data":"edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4"} Dec 05 11:36:32 crc kubenswrapper[5014]: I1205 11:36:32.161613 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l72rm" podStartSLOduration=3.322928863 podStartE2EDuration="6.16159349s" podCreationTimestamp="2025-12-05 11:36:26 +0000 UTC" firstStartedPulling="2025-12-05 11:36:28.093924766 +0000 UTC m=+2915.042042470" lastFinishedPulling="2025-12-05 11:36:30.932589383 +0000 UTC m=+2917.880707097" observedRunningTime="2025-12-05 11:36:32.153612884 +0000 UTC m=+2919.101730598" watchObservedRunningTime="2025-12-05 11:36:32.16159349 +0000 UTC m=+2919.109711194" Dec 05 11:36:32 crc kubenswrapper[5014]: I1205 11:36:32.936630 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:36:32 crc kubenswrapper[5014]: I1205 11:36:32.936699 5014 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:36:32 crc kubenswrapper[5014]: I1205 11:36:32.936741 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 11:36:32 crc kubenswrapper[5014]: I1205 11:36:32.937465 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7c4baa164edbcc345074a71078d1ad8ee11a17f3ce646c791c35806062fcaebb"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:36:32 crc kubenswrapper[5014]: I1205 11:36:32.937525 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://7c4baa164edbcc345074a71078d1ad8ee11a17f3ce646c791c35806062fcaebb" gracePeriod=600 Dec 05 11:36:33 crc kubenswrapper[5014]: I1205 11:36:33.146573 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="7c4baa164edbcc345074a71078d1ad8ee11a17f3ce646c791c35806062fcaebb" exitCode=0 Dec 05 11:36:33 crc kubenswrapper[5014]: I1205 11:36:33.146632 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"7c4baa164edbcc345074a71078d1ad8ee11a17f3ce646c791c35806062fcaebb"} Dec 05 11:36:33 crc kubenswrapper[5014]: I1205 11:36:33.146892 5014 scope.go:117] "RemoveContainer" containerID="74ce5fb2ccad7cb7b264e5910ffaf26784ba4c697729abd609f64f7ea702e43a" Dec 05 11:36:34 crc kubenswrapper[5014]: I1205 11:36:34.159468 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99"} Dec 05 11:36:36 crc kubenswrapper[5014]: I1205 11:36:36.716619 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:36 crc kubenswrapper[5014]: I1205 11:36:36.718455 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:36 crc kubenswrapper[5014]: I1205 11:36:36.791357 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:37 crc kubenswrapper[5014]: I1205 11:36:37.269802 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:37 crc kubenswrapper[5014]: I1205 11:36:37.328726 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l72rm"] Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.220535 5014 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-l72rm" podUID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerName="registry-server" containerID="cri-o://edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4" gracePeriod=2 Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.700868 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.839004 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-catalog-content\") pod \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.839095 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-utilities\") pod \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.839193 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrrq2\" (UniqueName: \"kubernetes.io/projected/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-kube-api-access-lrrq2\") pod \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\" (UID: \"7a23aac7-7c7f-4d77-b68b-0d421f8978ca\") " Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.839890 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-utilities" (OuterVolumeSpecName: "utilities") pod "7a23aac7-7c7f-4d77-b68b-0d421f8978ca" (UID: "7a23aac7-7c7f-4d77-b68b-0d421f8978ca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.845526 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-kube-api-access-lrrq2" (OuterVolumeSpecName: "kube-api-access-lrrq2") pod "7a23aac7-7c7f-4d77-b68b-0d421f8978ca" (UID: "7a23aac7-7c7f-4d77-b68b-0d421f8978ca"). InnerVolumeSpecName "kube-api-access-lrrq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.888945 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a23aac7-7c7f-4d77-b68b-0d421f8978ca" (UID: "7a23aac7-7c7f-4d77-b68b-0d421f8978ca"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.941445 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrrq2\" (UniqueName: \"kubernetes.io/projected/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-kube-api-access-lrrq2\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.941495 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:39 crc kubenswrapper[5014]: I1205 11:36:39.941507 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a23aac7-7c7f-4d77-b68b-0d421f8978ca-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.231244 5014 generic.go:334] "Generic (PLEG): container finished" podID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerID="edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4" exitCode=0 Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.231306 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l72rm" event={"ID":"7a23aac7-7c7f-4d77-b68b-0d421f8978ca","Type":"ContainerDied","Data":"edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4"} Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.231360 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l72rm" event={"ID":"7a23aac7-7c7f-4d77-b68b-0d421f8978ca","Type":"ContainerDied","Data":"939b18d4f9529a1984db6e12d9f6c66ee55f2033c5c0e20d2ea3f5619aa152de"} Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.231381 5014 scope.go:117] "RemoveContainer" containerID="edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.232315 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l72rm" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.251908 5014 scope.go:117] "RemoveContainer" containerID="439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.264984 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l72rm"] Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.275782 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l72rm"] Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.280603 5014 scope.go:117] "RemoveContainer" containerID="eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.316263 5014 scope.go:117] "RemoveContainer" containerID="edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4" Dec 05 11:36:40 crc kubenswrapper[5014]: E1205 11:36:40.316735 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4\": container with ID starting with edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4 not found: ID does not exist" containerID="edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.316763 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4"} err="failed to get container status \"edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4\": rpc error: code = NotFound desc = could not find container \"edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4\": container with ID starting with edf197181e98df8be40b71e7e30e3dafe4766361d1da68da69817957141011e4 not found: ID does not exist" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.316782 5014 scope.go:117] "RemoveContainer" containerID="439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d" Dec 05 11:36:40 crc kubenswrapper[5014]: E1205 11:36:40.317079 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d\": container with ID starting with 439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d not found: ID does not exist" containerID="439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.317105 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d"} err="failed to get container status \"439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d\": rpc error: code = NotFound desc = could not find container \"439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d\": container with ID starting with 439c96e3170170fcc5c423b24753db5580fafe24bf34053d032959fcfaf1ea6d not found: ID does not exist" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.317120 5014 scope.go:117] "RemoveContainer" containerID="eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355" Dec 05 11:36:40 crc kubenswrapper[5014]: E1205 11:36:40.317452 5014 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355\": container with ID starting with eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355 not found: ID does not exist" containerID="eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355" Dec 05 11:36:40 crc kubenswrapper[5014]: I1205 11:36:40.317491 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355"} err="failed to get container status \"eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355\": rpc error: code = NotFound desc = could not find container \"eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355\": container with ID starting with eb86fc545a87d11b97b7524e6aff8fe334bb57c9162f284c616a083715d0a355 not found: ID does not exist" Dec 05 11:36:41 crc kubenswrapper[5014]: I1205 11:36:41.329896 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" path="/var/lib/kubelet/pods/7a23aac7-7c7f-4d77-b68b-0d421f8978ca/volumes" Dec 05 11:36:52 crc kubenswrapper[5014]: I1205 11:36:52.336180 5014 generic.go:334] "Generic (PLEG): container finished" podID="68801bb8-5aae-4367-9c85-a1c139ab1844" containerID="782b186d9c0643f602e7cce1883b86a340fad05ce3e49d4886c1393ae60046a6" exitCode=0 Dec 05 11:36:52 crc kubenswrapper[5014]: I1205 11:36:52.336254 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" event={"ID":"68801bb8-5aae-4367-9c85-a1c139ab1844","Type":"ContainerDied","Data":"782b186d9c0643f602e7cce1883b86a340fad05ce3e49d4886c1393ae60046a6"} Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.754201 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.898333 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-0\") pod \"68801bb8-5aae-4367-9c85-a1c139ab1844\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.898429 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-1\") pod \"68801bb8-5aae-4367-9c85-a1c139ab1844\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.898485 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-telemetry-combined-ca-bundle\") pod \"68801bb8-5aae-4367-9c85-a1c139ab1844\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.898639 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgfz8\" (UniqueName: \"kubernetes.io/projected/68801bb8-5aae-4367-9c85-a1c139ab1844-kube-api-access-hgfz8\") pod \"68801bb8-5aae-4367-9c85-a1c139ab1844\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.898694 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-2\") pod \"68801bb8-5aae-4367-9c85-a1c139ab1844\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.898742 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ssh-key\") pod \"68801bb8-5aae-4367-9c85-a1c139ab1844\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.898783 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-inventory\") pod \"68801bb8-5aae-4367-9c85-a1c139ab1844\" (UID: \"68801bb8-5aae-4367-9c85-a1c139ab1844\") " Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.905873 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68801bb8-5aae-4367-9c85-a1c139ab1844-kube-api-access-hgfz8" (OuterVolumeSpecName: "kube-api-access-hgfz8") pod "68801bb8-5aae-4367-9c85-a1c139ab1844" (UID: "68801bb8-5aae-4367-9c85-a1c139ab1844"). InnerVolumeSpecName "kube-api-access-hgfz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.908039 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "68801bb8-5aae-4367-9c85-a1c139ab1844" (UID: "68801bb8-5aae-4367-9c85-a1c139ab1844"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.928682 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "68801bb8-5aae-4367-9c85-a1c139ab1844" (UID: "68801bb8-5aae-4367-9c85-a1c139ab1844"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.929006 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "68801bb8-5aae-4367-9c85-a1c139ab1844" (UID: "68801bb8-5aae-4367-9c85-a1c139ab1844"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.930385 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-inventory" (OuterVolumeSpecName: "inventory") pod "68801bb8-5aae-4367-9c85-a1c139ab1844" (UID: "68801bb8-5aae-4367-9c85-a1c139ab1844"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.931611 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "68801bb8-5aae-4367-9c85-a1c139ab1844" (UID: "68801bb8-5aae-4367-9c85-a1c139ab1844"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:36:53 crc kubenswrapper[5014]: I1205 11:36:53.939601 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "68801bb8-5aae-4367-9c85-a1c139ab1844" (UID: "68801bb8-5aae-4367-9c85-a1c139ab1844"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.006503 5014 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.006701 5014 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.006762 5014 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.006848 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgfz8\" (UniqueName: \"kubernetes.io/projected/68801bb8-5aae-4367-9c85-a1c139ab1844-kube-api-access-hgfz8\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.006914 5014 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.006986 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.007046 5014 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68801bb8-5aae-4367-9c85-a1c139ab1844-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.355876 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" event={"ID":"68801bb8-5aae-4367-9c85-a1c139ab1844","Type":"ContainerDied","Data":"8bbf9f60769c510b407573d0c7714ce14cb75d7d273d0791dee512be5d980eff"} Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.355913 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8bbf9f60769c510b407573d0c7714ce14cb75d7d273d0791dee512be5d980eff" Dec 05 11:36:54 crc kubenswrapper[5014]: I1205 11:36:54.355919 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.744382 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 11:37:53 crc kubenswrapper[5014]: E1205 11:37:53.745755 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerName="extract-content" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.745777 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerName="extract-content" Dec 05 11:37:53 crc kubenswrapper[5014]: E1205 11:37:53.745850 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68801bb8-5aae-4367-9c85-a1c139ab1844" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.745867 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="68801bb8-5aae-4367-9c85-a1c139ab1844" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 11:37:53 crc kubenswrapper[5014]: E1205 11:37:53.745899 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerName="registry-server" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.745908 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerName="registry-server" Dec 05 11:37:53 crc kubenswrapper[5014]: E1205 11:37:53.745944 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerName="extract-utilities" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.745954 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerName="extract-utilities" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.746225 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="68801bb8-5aae-4367-9c85-a1c139ab1844" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.746260 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a23aac7-7c7f-4d77-b68b-0d421f8978ca" containerName="registry-server" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.747295 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.751333 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-5rq69" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.751709 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.751752 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.751783 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.761319 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.849638 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.850117 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-config-data\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.850312 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf5l5\" (UniqueName: \"kubernetes.io/projected/3f886993-57e9-4023-8186-8fbdeb4fe04c-kube-api-access-mf5l5\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.850471 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.850653 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.850805 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.850947 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.851136 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.851332 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.953459 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.953807 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-config-data\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.953955 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf5l5\" (UniqueName: \"kubernetes.io/projected/3f886993-57e9-4023-8186-8fbdeb4fe04c-kube-api-access-mf5l5\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.954125 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.954723 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.955514 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-config-data\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.955685 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ssh-key\") pod 
\"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.955842 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.956017 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.956249 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.956440 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.956376 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.960287 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.961089 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.963570 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.966491 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest" Dec 05 
Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.978393 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf5l5\" (UniqueName: \"kubernetes.io/projected/3f886993-57e9-4023-8186-8fbdeb4fe04c-kube-api-access-mf5l5\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest"
Dec 05 11:37:53 crc kubenswrapper[5014]: I1205 11:37:53.986615 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") " pod="openstack/tempest-tests-tempest"
Dec 05 11:37:54 crc kubenswrapper[5014]: I1205 11:37:54.078742 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 11:37:54 crc kubenswrapper[5014]: I1205 11:37:54.550448 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Dec 05 11:37:54 crc kubenswrapper[5014]: W1205 11:37:54.558968 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f886993_57e9_4023_8186_8fbdeb4fe04c.slice/crio-5532021ed9e3a653abb213135604034f96a2b83a1bac15060d63e329eea5973e WatchSource:0}: Error finding container 5532021ed9e3a653abb213135604034f96a2b83a1bac15060d63e329eea5973e: Status 404 returned error can't find the container with id 5532021ed9e3a653abb213135604034f96a2b83a1bac15060d63e329eea5973e
Dec 05 11:37:54 crc kubenswrapper[5014]: I1205 11:37:54.560823 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 11:37:54 crc kubenswrapper[5014]: I1205 11:37:54.919966 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3f886993-57e9-4023-8186-8fbdeb4fe04c","Type":"ContainerStarted","Data":"5532021ed9e3a653abb213135604034f96a2b83a1bac15060d63e329eea5973e"}
Dec 05 11:38:29 crc kubenswrapper[5014]: E1205 11:38:29.778325 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified"
Dec 05 11:38:29 crc kubenswrapper[5014]: E1205 11:38:29.779832 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mf5l5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(3f886993-57e9-4023-8186-8fbdeb4fe04c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 11:38:29 crc kubenswrapper[5014]: E1205 11:38:29.781110 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="3f886993-57e9-4023-8186-8fbdeb4fe04c"
Dec 05 11:38:30 crc kubenswrapper[5014]: E1205 11:38:30.264987 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="3f886993-57e9-4023-8186-8fbdeb4fe04c"
Dec 05 11:38:45 crc kubenswrapper[5014]: I1205 11:38:45.389248 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3f886993-57e9-4023-8186-8fbdeb4fe04c","Type":"ContainerStarted","Data":"10e5b986139e32dc80abbf4951e8a804d1d45c53870158f7e317a60c78e0f921"}
Dec 05 11:38:45 crc kubenswrapper[5014]: I1205 11:38:45.424924 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.085479419 podStartE2EDuration="53.424903437s" podCreationTimestamp="2025-12-05 11:37:52 +0000 UTC" firstStartedPulling="2025-12-05 11:37:54.560612113 +0000 UTC m=+3001.508729817" lastFinishedPulling="2025-12-05 11:38:43.900036131 +0000 UTC m=+3050.848153835" observedRunningTime="2025-12-05 11:38:45.412147923 +0000 UTC m=+3052.360265657" watchObservedRunningTime="2025-12-05 11:38:45.424903437 +0000 UTC m=+3052.373021141"
Dec 05 11:39:02 crc kubenswrapper[5014]: I1205 11:39:02.936959 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:39:02 crc kubenswrapper[5014]: I1205 11:39:02.937545 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:39:32 crc kubenswrapper[5014]: I1205 11:39:32.936325 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:39:32 crc kubenswrapper[5014]: I1205 11:39:32.936900 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:40:02 crc kubenswrapper[5014]: I1205 11:40:02.937201 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:40:02 crc kubenswrapper[5014]: I1205 11:40:02.938021 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:40:02 crc kubenswrapper[5014]: I1205 11:40:02.938105 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 11:40:02 crc kubenswrapper[5014]: I1205 11:40:02.939678 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:40:02 crc kubenswrapper[5014]: I1205 11:40:02.939750 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" gracePeriod=600 Dec 05 11:40:03 crc kubenswrapper[5014]: E1205 11:40:03.081514 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:40:03 crc kubenswrapper[5014]: I1205 11:40:03.161515 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" exitCode=0 Dec 05 11:40:03 crc kubenswrapper[5014]: I1205 11:40:03.161549 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99"} Dec 05 11:40:03 crc kubenswrapper[5014]: I1205 11:40:03.161621 5014 scope.go:117] "RemoveContainer" containerID="7c4baa164edbcc345074a71078d1ad8ee11a17f3ce646c791c35806062fcaebb" Dec 05 11:40:03 crc kubenswrapper[5014]: I1205 11:40:03.162467 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:40:03 crc kubenswrapper[5014]: E1205 11:40:03.162721 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:40:17 crc kubenswrapper[5014]: I1205 11:40:17.319252 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:40:17 crc kubenswrapper[5014]: E1205 11:40:17.320206 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:40:28 crc kubenswrapper[5014]: I1205 11:40:28.317973 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:40:28 crc kubenswrapper[5014]: E1205 11:40:28.318764 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:40:41 crc kubenswrapper[5014]: I1205 11:40:41.318457 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:40:41 crc kubenswrapper[5014]: E1205 11:40:41.319479 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:40:53 crc kubenswrapper[5014]: I1205 11:40:53.334998 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:40:53 crc kubenswrapper[5014]: E1205 11:40:53.336165 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:41:05 crc kubenswrapper[5014]: I1205 11:41:05.318798 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:41:05 crc kubenswrapper[5014]: E1205 11:41:05.319552 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:41:17 crc kubenswrapper[5014]: I1205 11:41:17.322120 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:41:17 crc kubenswrapper[5014]: E1205 11:41:17.323925 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" 
podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:41:31 crc kubenswrapper[5014]: I1205 11:41:31.318996 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:41:31 crc kubenswrapper[5014]: E1205 11:41:31.319749 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:41:46 crc kubenswrapper[5014]: I1205 11:41:46.318794 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:41:46 crc kubenswrapper[5014]: E1205 11:41:46.319794 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:41:58 crc kubenswrapper[5014]: I1205 11:41:58.319658 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:41:58 crc kubenswrapper[5014]: E1205 11:41:58.320442 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:42:12 crc kubenswrapper[5014]: I1205 11:42:12.318679 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:42:12 crc kubenswrapper[5014]: E1205 11:42:12.319611 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:42:26 crc kubenswrapper[5014]: I1205 11:42:26.318864 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:42:26 crc kubenswrapper[5014]: E1205 11:42:26.320173 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:42:39 crc kubenswrapper[5014]: I1205 11:42:39.319726 5014 scope.go:117] "RemoveContainer" 
containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:42:39 crc kubenswrapper[5014]: E1205 11:42:39.321356 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:42:54 crc kubenswrapper[5014]: I1205 11:42:54.318750 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:42:54 crc kubenswrapper[5014]: E1205 11:42:54.320686 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:43:05 crc kubenswrapper[5014]: I1205 11:43:05.318500 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:43:05 crc kubenswrapper[5014]: E1205 11:43:05.319233 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:43:16 crc kubenswrapper[5014]: I1205 11:43:16.318940 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:43:16 crc kubenswrapper[5014]: E1205 11:43:16.319820 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:43:27 crc kubenswrapper[5014]: I1205 11:43:27.318418 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:43:27 crc kubenswrapper[5014]: E1205 11:43:27.319189 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.644220 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tfm8d"] Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.647399 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.656923 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tfm8d"] Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.726797 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-utilities\") pod \"redhat-operators-tfm8d\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.726849 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-catalog-content\") pod \"redhat-operators-tfm8d\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.726963 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms6kg\" (UniqueName: \"kubernetes.io/projected/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-kube-api-access-ms6kg\") pod \"redhat-operators-tfm8d\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.828813 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-utilities\") pod \"redhat-operators-tfm8d\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.828873 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-catalog-content\") pod \"redhat-operators-tfm8d\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.828952 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms6kg\" (UniqueName: \"kubernetes.io/projected/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-kube-api-access-ms6kg\") pod \"redhat-operators-tfm8d\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.829397 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-utilities\") pod \"redhat-operators-tfm8d\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.829455 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-catalog-content\") pod \"redhat-operators-tfm8d\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.851045 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ms6kg\" (UniqueName: \"kubernetes.io/projected/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-kube-api-access-ms6kg\") pod \"redhat-operators-tfm8d\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:37 crc kubenswrapper[5014]: I1205 11:43:37.981512 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:38 crc kubenswrapper[5014]: I1205 11:43:38.514536 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tfm8d"] Dec 05 11:43:39 crc kubenswrapper[5014]: I1205 11:43:39.213893 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfm8d" event={"ID":"c94a286e-5eae-4c61-bfe6-c8b90310bbe1","Type":"ContainerStarted","Data":"fd969cb1eec18d9eed56650257fc0839c80177f26491ba2c631b2eb76c42a645"} Dec 05 11:43:40 crc kubenswrapper[5014]: I1205 11:43:40.224577 5014 generic.go:334] "Generic (PLEG): container finished" podID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerID="b29944786c194ef27cd8b67c558509c919793aff2669acaca78dad96e4d6e570" exitCode=0 Dec 05 11:43:40 crc kubenswrapper[5014]: I1205 11:43:40.224623 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfm8d" event={"ID":"c94a286e-5eae-4c61-bfe6-c8b90310bbe1","Type":"ContainerDied","Data":"b29944786c194ef27cd8b67c558509c919793aff2669acaca78dad96e4d6e570"} Dec 05 11:43:40 crc kubenswrapper[5014]: I1205 11:43:40.226294 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:43:40 crc kubenswrapper[5014]: I1205 11:43:40.318399 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:43:40 crc kubenswrapper[5014]: E1205 11:43:40.318953 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:43:41 crc kubenswrapper[5014]: I1205 11:43:41.237989 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfm8d" event={"ID":"c94a286e-5eae-4c61-bfe6-c8b90310bbe1","Type":"ContainerStarted","Data":"d9407cdefd2800b7642cc9fab1140d931727cb5850ddf561038b8b5d2acb2aac"} Dec 05 11:43:42 crc kubenswrapper[5014]: I1205 11:43:42.248664 5014 generic.go:334] "Generic (PLEG): container finished" podID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerID="d9407cdefd2800b7642cc9fab1140d931727cb5850ddf561038b8b5d2acb2aac" exitCode=0 Dec 05 11:43:42 crc kubenswrapper[5014]: I1205 11:43:42.248738 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfm8d" event={"ID":"c94a286e-5eae-4c61-bfe6-c8b90310bbe1","Type":"ContainerDied","Data":"d9407cdefd2800b7642cc9fab1140d931727cb5850ddf561038b8b5d2acb2aac"} Dec 05 11:43:43 crc kubenswrapper[5014]: I1205 11:43:43.266087 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfm8d" 
event={"ID":"c94a286e-5eae-4c61-bfe6-c8b90310bbe1","Type":"ContainerStarted","Data":"227e3cc1e602ac13f0f0040f9659438ec9c48722c397604af83fedd3ee3ec073"} Dec 05 11:43:43 crc kubenswrapper[5014]: I1205 11:43:43.285652 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tfm8d" podStartSLOduration=3.841591132 podStartE2EDuration="6.285627432s" podCreationTimestamp="2025-12-05 11:43:37 +0000 UTC" firstStartedPulling="2025-12-05 11:43:40.225967896 +0000 UTC m=+3347.174085600" lastFinishedPulling="2025-12-05 11:43:42.670004206 +0000 UTC m=+3349.618121900" observedRunningTime="2025-12-05 11:43:43.281979822 +0000 UTC m=+3350.230097526" watchObservedRunningTime="2025-12-05 11:43:43.285627432 +0000 UTC m=+3350.233745136" Dec 05 11:43:47 crc kubenswrapper[5014]: I1205 11:43:47.982264 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:47 crc kubenswrapper[5014]: I1205 11:43:47.982871 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:48 crc kubenswrapper[5014]: I1205 11:43:48.027227 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:48 crc kubenswrapper[5014]: I1205 11:43:48.374678 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:48 crc kubenswrapper[5014]: I1205 11:43:48.454874 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tfm8d"] Dec 05 11:43:50 crc kubenswrapper[5014]: I1205 11:43:50.339215 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tfm8d" podUID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerName="registry-server" containerID="cri-o://227e3cc1e602ac13f0f0040f9659438ec9c48722c397604af83fedd3ee3ec073" gracePeriod=2 Dec 05 11:43:51 crc kubenswrapper[5014]: I1205 11:43:51.349058 5014 generic.go:334] "Generic (PLEG): container finished" podID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerID="227e3cc1e602ac13f0f0040f9659438ec9c48722c397604af83fedd3ee3ec073" exitCode=0 Dec 05 11:43:51 crc kubenswrapper[5014]: I1205 11:43:51.349134 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfm8d" event={"ID":"c94a286e-5eae-4c61-bfe6-c8b90310bbe1","Type":"ContainerDied","Data":"227e3cc1e602ac13f0f0040f9659438ec9c48722c397604af83fedd3ee3ec073"} Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.139354 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.219322 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ms6kg\" (UniqueName: \"kubernetes.io/projected/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-kube-api-access-ms6kg\") pod \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.219369 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-catalog-content\") pod \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.219577 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-utilities\") pod \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\" (UID: \"c94a286e-5eae-4c61-bfe6-c8b90310bbe1\") " Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.220408 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-utilities" (OuterVolumeSpecName: "utilities") pod "c94a286e-5eae-4c61-bfe6-c8b90310bbe1" (UID: "c94a286e-5eae-4c61-bfe6-c8b90310bbe1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.225770 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-kube-api-access-ms6kg" (OuterVolumeSpecName: "kube-api-access-ms6kg") pod "c94a286e-5eae-4c61-bfe6-c8b90310bbe1" (UID: "c94a286e-5eae-4c61-bfe6-c8b90310bbe1"). InnerVolumeSpecName "kube-api-access-ms6kg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.321819 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ms6kg\" (UniqueName: \"kubernetes.io/projected/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-kube-api-access-ms6kg\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.321856 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.321789 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c94a286e-5eae-4c61-bfe6-c8b90310bbe1" (UID: "c94a286e-5eae-4c61-bfe6-c8b90310bbe1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.328859 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:43:53 crc kubenswrapper[5014]: E1205 11:43:53.329164 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.374403 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tfm8d" event={"ID":"c94a286e-5eae-4c61-bfe6-c8b90310bbe1","Type":"ContainerDied","Data":"fd969cb1eec18d9eed56650257fc0839c80177f26491ba2c631b2eb76c42a645"} Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.374451 5014 scope.go:117] "RemoveContainer" containerID="227e3cc1e602ac13f0f0040f9659438ec9c48722c397604af83fedd3ee3ec073" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.374547 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tfm8d" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.404611 5014 scope.go:117] "RemoveContainer" containerID="d9407cdefd2800b7642cc9fab1140d931727cb5850ddf561038b8b5d2acb2aac" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.405436 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tfm8d"] Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.415190 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tfm8d"] Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.423755 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94a286e-5eae-4c61-bfe6-c8b90310bbe1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:43:53 crc kubenswrapper[5014]: I1205 11:43:53.426135 5014 scope.go:117] "RemoveContainer" containerID="b29944786c194ef27cd8b67c558509c919793aff2669acaca78dad96e4d6e570" Dec 05 11:43:55 crc kubenswrapper[5014]: I1205 11:43:55.327432 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" path="/var/lib/kubelet/pods/c94a286e-5eae-4c61-bfe6-c8b90310bbe1/volumes" Dec 05 11:43:55 crc kubenswrapper[5014]: I1205 11:43:55.964777 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n2zcm"] Dec 05 11:43:55 crc kubenswrapper[5014]: E1205 11:43:55.965240 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerName="extract-utilities" Dec 05 11:43:55 crc kubenswrapper[5014]: I1205 11:43:55.965258 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerName="extract-utilities" Dec 05 11:43:55 crc kubenswrapper[5014]: E1205 11:43:55.965378 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerName="registry-server" Dec 05 11:43:55 crc kubenswrapper[5014]: I1205 11:43:55.965389 5014 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerName="registry-server" Dec 05 11:43:55 crc kubenswrapper[5014]: E1205 11:43:55.965408 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerName="extract-content" Dec 05 11:43:55 crc kubenswrapper[5014]: I1205 11:43:55.965417 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerName="extract-content" Dec 05 11:43:55 crc kubenswrapper[5014]: I1205 11:43:55.965681 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="c94a286e-5eae-4c61-bfe6-c8b90310bbe1" containerName="registry-server" Dec 05 11:43:55 crc kubenswrapper[5014]: I1205 11:43:55.967980 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:55 crc kubenswrapper[5014]: I1205 11:43:55.980811 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n2zcm"] Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.073684 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snf8g\" (UniqueName: \"kubernetes.io/projected/baab7918-a4d4-421f-ac5b-0f95b8df08c0-kube-api-access-snf8g\") pod \"community-operators-n2zcm\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.073727 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-catalog-content\") pod \"community-operators-n2zcm\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.073833 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-utilities\") pod \"community-operators-n2zcm\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.175907 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-utilities\") pod \"community-operators-n2zcm\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.176056 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snf8g\" (UniqueName: \"kubernetes.io/projected/baab7918-a4d4-421f-ac5b-0f95b8df08c0-kube-api-access-snf8g\") pod \"community-operators-n2zcm\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.176087 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-catalog-content\") pod \"community-operators-n2zcm\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.176555 5014 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-utilities\") pod \"community-operators-n2zcm\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.176645 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-catalog-content\") pod \"community-operators-n2zcm\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.195037 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snf8g\" (UniqueName: \"kubernetes.io/projected/baab7918-a4d4-421f-ac5b-0f95b8df08c0-kube-api-access-snf8g\") pod \"community-operators-n2zcm\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.301208 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:43:56 crc kubenswrapper[5014]: I1205 11:43:56.816362 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n2zcm"] Dec 05 11:43:56 crc kubenswrapper[5014]: W1205 11:43:56.819938 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbaab7918_a4d4_421f_ac5b_0f95b8df08c0.slice/crio-b83116d68228c1e759b5e42e7e832f3c084cd546fd443dfbffae88708be62d75 WatchSource:0}: Error finding container b83116d68228c1e759b5e42e7e832f3c084cd546fd443dfbffae88708be62d75: Status 404 returned error can't find the container with id b83116d68228c1e759b5e42e7e832f3c084cd546fd443dfbffae88708be62d75 Dec 05 11:43:57 crc kubenswrapper[5014]: I1205 11:43:57.413455 5014 generic.go:334] "Generic (PLEG): container finished" podID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerID="6bf86228254bf1fc7293bfd91d3e6f430fa1fd5d65a5923b9f304f3bdfa82953" exitCode=0 Dec 05 11:43:57 crc kubenswrapper[5014]: I1205 11:43:57.413492 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2zcm" event={"ID":"baab7918-a4d4-421f-ac5b-0f95b8df08c0","Type":"ContainerDied","Data":"6bf86228254bf1fc7293bfd91d3e6f430fa1fd5d65a5923b9f304f3bdfa82953"} Dec 05 11:43:57 crc kubenswrapper[5014]: I1205 11:43:57.413517 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2zcm" event={"ID":"baab7918-a4d4-421f-ac5b-0f95b8df08c0","Type":"ContainerStarted","Data":"b83116d68228c1e759b5e42e7e832f3c084cd546fd443dfbffae88708be62d75"} Dec 05 11:43:58 crc kubenswrapper[5014]: I1205 11:43:58.424245 5014 generic.go:334] "Generic (PLEG): container finished" podID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerID="292464489f71e7e9888fe21416520d0292e9f188913eb33718129bbfb0cbd9ea" exitCode=0 Dec 05 11:43:58 crc kubenswrapper[5014]: I1205 11:43:58.424304 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2zcm" event={"ID":"baab7918-a4d4-421f-ac5b-0f95b8df08c0","Type":"ContainerDied","Data":"292464489f71e7e9888fe21416520d0292e9f188913eb33718129bbfb0cbd9ea"} Dec 05 11:43:59 crc kubenswrapper[5014]: I1205 11:43:59.436671 5014 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2zcm" event={"ID":"baab7918-a4d4-421f-ac5b-0f95b8df08c0","Type":"ContainerStarted","Data":"0156de301894f06413c7c0d442e7f1e8119b4bccdb468af46ed40d1803753d2b"} Dec 05 11:43:59 crc kubenswrapper[5014]: I1205 11:43:59.462899 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n2zcm" podStartSLOduration=3.077531751 podStartE2EDuration="4.462873478s" podCreationTimestamp="2025-12-05 11:43:55 +0000 UTC" firstStartedPulling="2025-12-05 11:43:57.41489809 +0000 UTC m=+3364.363015794" lastFinishedPulling="2025-12-05 11:43:58.800239807 +0000 UTC m=+3365.748357521" observedRunningTime="2025-12-05 11:43:59.45234781 +0000 UTC m=+3366.400465524" watchObservedRunningTime="2025-12-05 11:43:59.462873478 +0000 UTC m=+3366.410991182" Dec 05 11:44:01 crc kubenswrapper[5014]: E1205 11:44:01.227350 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice/crio-fd969cb1eec18d9eed56650257fc0839c80177f26491ba2c631b2eb76c42a645\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice\": RecentStats: unable to find data in memory cache]" Dec 05 11:44:04 crc kubenswrapper[5014]: I1205 11:44:04.318999 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:44:04 crc kubenswrapper[5014]: E1205 11:44:04.319833 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:44:06 crc kubenswrapper[5014]: I1205 11:44:06.301574 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:44:06 crc kubenswrapper[5014]: I1205 11:44:06.301905 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:44:06 crc kubenswrapper[5014]: I1205 11:44:06.346777 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:44:06 crc kubenswrapper[5014]: I1205 11:44:06.573550 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:44:06 crc kubenswrapper[5014]: I1205 11:44:06.630569 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n2zcm"] Dec 05 11:44:08 crc kubenswrapper[5014]: I1205 11:44:08.534915 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n2zcm" podUID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerName="registry-server" containerID="cri-o://0156de301894f06413c7c0d442e7f1e8119b4bccdb468af46ed40d1803753d2b" gracePeriod=2 Dec 05 11:44:10 crc kubenswrapper[5014]: I1205 11:44:10.082554 5014 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openstack/cinder-scheduler-0" podUID="934811df-aabf-44df-8b73-4612a55d73a2" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.165:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 11:44:11 crc kubenswrapper[5014]: E1205 11:44:11.479514 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice/crio-fd969cb1eec18d9eed56650257fc0839c80177f26491ba2c631b2eb76c42a645\": RecentStats: unable to find data in memory cache]" Dec 05 11:44:11 crc kubenswrapper[5014]: I1205 11:44:11.568690 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-n2zcm_baab7918-a4d4-421f-ac5b-0f95b8df08c0/registry-server/0.log" Dec 05 11:44:11 crc kubenswrapper[5014]: I1205 11:44:11.569508 5014 generic.go:334] "Generic (PLEG): container finished" podID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerID="0156de301894f06413c7c0d442e7f1e8119b4bccdb468af46ed40d1803753d2b" exitCode=137 Dec 05 11:44:11 crc kubenswrapper[5014]: I1205 11:44:11.569557 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2zcm" event={"ID":"baab7918-a4d4-421f-ac5b-0f95b8df08c0","Type":"ContainerDied","Data":"0156de301894f06413c7c0d442e7f1e8119b4bccdb468af46ed40d1803753d2b"} Dec 05 11:44:12 crc kubenswrapper[5014]: I1205 11:44:12.838226 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-n2zcm_baab7918-a4d4-421f-ac5b-0f95b8df08c0/registry-server/0.log" Dec 05 11:44:12 crc kubenswrapper[5014]: I1205 11:44:12.840035 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:44:12 crc kubenswrapper[5014]: I1205 11:44:12.926175 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snf8g\" (UniqueName: \"kubernetes.io/projected/baab7918-a4d4-421f-ac5b-0f95b8df08c0-kube-api-access-snf8g\") pod \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " Dec 05 11:44:12 crc kubenswrapper[5014]: I1205 11:44:12.926255 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-utilities\") pod \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " Dec 05 11:44:12 crc kubenswrapper[5014]: I1205 11:44:12.926400 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-catalog-content\") pod \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\" (UID: \"baab7918-a4d4-421f-ac5b-0f95b8df08c0\") " Dec 05 11:44:12 crc kubenswrapper[5014]: I1205 11:44:12.927145 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-utilities" (OuterVolumeSpecName: "utilities") pod "baab7918-a4d4-421f-ac5b-0f95b8df08c0" (UID: "baab7918-a4d4-421f-ac5b-0f95b8df08c0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:44:12 crc kubenswrapper[5014]: I1205 11:44:12.943893 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baab7918-a4d4-421f-ac5b-0f95b8df08c0-kube-api-access-snf8g" (OuterVolumeSpecName: "kube-api-access-snf8g") pod "baab7918-a4d4-421f-ac5b-0f95b8df08c0" (UID: "baab7918-a4d4-421f-ac5b-0f95b8df08c0"). InnerVolumeSpecName "kube-api-access-snf8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:44:12 crc kubenswrapper[5014]: I1205 11:44:12.978318 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "baab7918-a4d4-421f-ac5b-0f95b8df08c0" (UID: "baab7918-a4d4-421f-ac5b-0f95b8df08c0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.028965 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snf8g\" (UniqueName: \"kubernetes.io/projected/baab7918-a4d4-421f-ac5b-0f95b8df08c0-kube-api-access-snf8g\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.029027 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.029050 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baab7918-a4d4-421f-ac5b-0f95b8df08c0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.590317 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-n2zcm_baab7918-a4d4-421f-ac5b-0f95b8df08c0/registry-server/0.log" Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.591910 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n2zcm" event={"ID":"baab7918-a4d4-421f-ac5b-0f95b8df08c0","Type":"ContainerDied","Data":"b83116d68228c1e759b5e42e7e832f3c084cd546fd443dfbffae88708be62d75"} Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.591975 5014 scope.go:117] "RemoveContainer" containerID="0156de301894f06413c7c0d442e7f1e8119b4bccdb468af46ed40d1803753d2b" Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.592169 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n2zcm" Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.620934 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n2zcm"] Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.622484 5014 scope.go:117] "RemoveContainer" containerID="292464489f71e7e9888fe21416520d0292e9f188913eb33718129bbfb0cbd9ea" Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.642893 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n2zcm"] Dec 05 11:44:13 crc kubenswrapper[5014]: I1205 11:44:13.653617 5014 scope.go:117] "RemoveContainer" containerID="6bf86228254bf1fc7293bfd91d3e6f430fa1fd5d65a5923b9f304f3bdfa82953" Dec 05 11:44:15 crc kubenswrapper[5014]: I1205 11:44:15.330967 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" path="/var/lib/kubelet/pods/baab7918-a4d4-421f-ac5b-0f95b8df08c0/volumes" Dec 05 11:44:19 crc kubenswrapper[5014]: I1205 11:44:19.318991 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:44:19 crc kubenswrapper[5014]: E1205 11:44:19.319903 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:44:21 crc kubenswrapper[5014]: E1205 11:44:21.717905 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice/crio-fd969cb1eec18d9eed56650257fc0839c80177f26491ba2c631b2eb76c42a645\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice\": RecentStats: unable to find data in memory cache]" Dec 05 11:44:31 crc kubenswrapper[5014]: E1205 11:44:31.957705 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice/crio-fd969cb1eec18d9eed56650257fc0839c80177f26491ba2c631b2eb76c42a645\": RecentStats: unable to find data in memory cache]" Dec 05 11:44:33 crc kubenswrapper[5014]: I1205 11:44:33.323868 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:44:33 crc kubenswrapper[5014]: E1205 11:44:33.324498 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:44:40 crc 
kubenswrapper[5014]: I1205 11:44:40.197745 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tcf2k"] Dec 05 11:44:40 crc kubenswrapper[5014]: E1205 11:44:40.198791 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerName="extract-content" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.198806 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerName="extract-content" Dec 05 11:44:40 crc kubenswrapper[5014]: E1205 11:44:40.198828 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerName="registry-server" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.198838 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerName="registry-server" Dec 05 11:44:40 crc kubenswrapper[5014]: E1205 11:44:40.198879 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerName="extract-utilities" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.198888 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerName="extract-utilities" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.199155 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="baab7918-a4d4-421f-ac5b-0f95b8df08c0" containerName="registry-server" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.200795 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.211372 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcf2k"] Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.347772 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-utilities\") pod \"redhat-marketplace-tcf2k\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.347994 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-catalog-content\") pod \"redhat-marketplace-tcf2k\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.348186 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pr6w\" (UniqueName: \"kubernetes.io/projected/d2e1f221-ce38-4433-82da-e115a924b6c3-kube-api-access-8pr6w\") pod \"redhat-marketplace-tcf2k\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.450122 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-catalog-content\") pod \"redhat-marketplace-tcf2k\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 
11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.450205 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pr6w\" (UniqueName: \"kubernetes.io/projected/d2e1f221-ce38-4433-82da-e115a924b6c3-kube-api-access-8pr6w\") pod \"redhat-marketplace-tcf2k\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.450375 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-utilities\") pod \"redhat-marketplace-tcf2k\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.450996 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-utilities\") pod \"redhat-marketplace-tcf2k\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.451027 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-catalog-content\") pod \"redhat-marketplace-tcf2k\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.470240 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pr6w\" (UniqueName: \"kubernetes.io/projected/d2e1f221-ce38-4433-82da-e115a924b6c3-kube-api-access-8pr6w\") pod \"redhat-marketplace-tcf2k\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.529939 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:40 crc kubenswrapper[5014]: I1205 11:44:40.997628 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcf2k"] Dec 05 11:44:41 crc kubenswrapper[5014]: I1205 11:44:41.839012 5014 generic.go:334] "Generic (PLEG): container finished" podID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerID="a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777" exitCode=0 Dec 05 11:44:41 crc kubenswrapper[5014]: I1205 11:44:41.839124 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcf2k" event={"ID":"d2e1f221-ce38-4433-82da-e115a924b6c3","Type":"ContainerDied","Data":"a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777"} Dec 05 11:44:41 crc kubenswrapper[5014]: I1205 11:44:41.839336 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcf2k" event={"ID":"d2e1f221-ce38-4433-82da-e115a924b6c3","Type":"ContainerStarted","Data":"a644dab027fe70b974bdcc3b6fe0377f91beddba8b9b8859d94450bf02a6199a"} Dec 05 11:44:42 crc kubenswrapper[5014]: E1205 11:44:42.224367 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice/crio-fd969cb1eec18d9eed56650257fc0839c80177f26491ba2c631b2eb76c42a645\": RecentStats: unable to find data in memory cache]" Dec 05 11:44:42 crc kubenswrapper[5014]: I1205 11:44:42.850364 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcf2k" event={"ID":"d2e1f221-ce38-4433-82da-e115a924b6c3","Type":"ContainerStarted","Data":"45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b"} Dec 05 11:44:43 crc kubenswrapper[5014]: I1205 11:44:43.860742 5014 generic.go:334] "Generic (PLEG): container finished" podID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerID="45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b" exitCode=0 Dec 05 11:44:43 crc kubenswrapper[5014]: I1205 11:44:43.860789 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcf2k" event={"ID":"d2e1f221-ce38-4433-82da-e115a924b6c3","Type":"ContainerDied","Data":"45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b"} Dec 05 11:44:45 crc kubenswrapper[5014]: I1205 11:44:45.889770 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcf2k" event={"ID":"d2e1f221-ce38-4433-82da-e115a924b6c3","Type":"ContainerStarted","Data":"5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad"} Dec 05 11:44:45 crc kubenswrapper[5014]: I1205 11:44:45.912965 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tcf2k" podStartSLOduration=3.030017689 podStartE2EDuration="5.912944603s" podCreationTimestamp="2025-12-05 11:44:40 +0000 UTC" firstStartedPulling="2025-12-05 11:44:41.840732738 +0000 UTC m=+3408.788850442" lastFinishedPulling="2025-12-05 11:44:44.723659652 +0000 UTC m=+3411.671777356" observedRunningTime="2025-12-05 11:44:45.904893575 +0000 UTC m=+3412.853011289" watchObservedRunningTime="2025-12-05 11:44:45.912944603 +0000 
UTC m=+3412.861062307" Dec 05 11:44:48 crc kubenswrapper[5014]: I1205 11:44:48.318839 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99" Dec 05 11:44:48 crc kubenswrapper[5014]: E1205 11:44:48.319590 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:44:50 crc kubenswrapper[5014]: I1205 11:44:50.531698 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:50 crc kubenswrapper[5014]: I1205 11:44:50.532043 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:50 crc kubenswrapper[5014]: I1205 11:44:50.576175 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:50 crc kubenswrapper[5014]: I1205 11:44:50.975908 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:51 crc kubenswrapper[5014]: I1205 11:44:51.027452 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcf2k"] Dec 05 11:44:52 crc kubenswrapper[5014]: E1205 11:44:52.467060 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc94a286e_5eae_4c61_bfe6_c8b90310bbe1.slice/crio-fd969cb1eec18d9eed56650257fc0839c80177f26491ba2c631b2eb76c42a645\": RecentStats: unable to find data in memory cache]" Dec 05 11:44:52 crc kubenswrapper[5014]: I1205 11:44:52.949429 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tcf2k" podUID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerName="registry-server" containerID="cri-o://5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad" gracePeriod=2 Dec 05 11:44:53 crc kubenswrapper[5014]: E1205 11:44:53.347659 5014 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/3ce3a743f6688fa0eb354f68e3d5e7430a1482d127e7ab821fcae54f9e6b110d/diff" to get inode usage: stat /var/lib/containers/storage/overlay/3ce3a743f6688fa0eb354f68e3d5e7430a1482d127e7ab821fcae54f9e6b110d/diff: no such file or directory, extraDiskErr: Dec 05 11:44:53 crc kubenswrapper[5014]: I1205 11:44:53.957703 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:53 crc kubenswrapper[5014]: I1205 11:44:53.959242 5014 generic.go:334] "Generic (PLEG): container finished" podID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerID="5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad" exitCode=0 Dec 05 11:44:53 crc kubenswrapper[5014]: I1205 11:44:53.959330 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcf2k" event={"ID":"d2e1f221-ce38-4433-82da-e115a924b6c3","Type":"ContainerDied","Data":"5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad"} Dec 05 11:44:53 crc kubenswrapper[5014]: I1205 11:44:53.959365 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcf2k" event={"ID":"d2e1f221-ce38-4433-82da-e115a924b6c3","Type":"ContainerDied","Data":"a644dab027fe70b974bdcc3b6fe0377f91beddba8b9b8859d94450bf02a6199a"} Dec 05 11:44:53 crc kubenswrapper[5014]: I1205 11:44:53.959388 5014 scope.go:117] "RemoveContainer" containerID="5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad" Dec 05 11:44:53 crc kubenswrapper[5014]: I1205 11:44:53.981599 5014 scope.go:117] "RemoveContainer" containerID="45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.018149 5014 scope.go:117] "RemoveContainer" containerID="a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.060060 5014 scope.go:117] "RemoveContainer" containerID="5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad" Dec 05 11:44:54 crc kubenswrapper[5014]: E1205 11:44:54.066930 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad\": container with ID starting with 5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad not found: ID does not exist" containerID="5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.066990 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad"} err="failed to get container status \"5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad\": rpc error: code = NotFound desc = could not find container \"5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad\": container with ID starting with 5013e68a0403a5f5bc9eb853b1027114bb49f83cf8eb20c3d1ec932e204b08ad not found: ID does not exist" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.067018 5014 scope.go:117] "RemoveContainer" containerID="45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b" Dec 05 11:44:54 crc kubenswrapper[5014]: E1205 11:44:54.067328 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b\": container with ID starting with 45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b not found: ID does not exist" containerID="45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.067347 5014 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b"} err="failed to get container status \"45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b\": rpc error: code = NotFound desc = could not find container \"45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b\": container with ID starting with 45c9f07f7783a5f02c6696f36a02178804e80c9523f9380fcbc4be35e1a7b11b not found: ID does not exist" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.067360 5014 scope.go:117] "RemoveContainer" containerID="a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777" Dec 05 11:44:54 crc kubenswrapper[5014]: E1205 11:44:54.067812 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777\": container with ID starting with a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777 not found: ID does not exist" containerID="a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.067853 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777"} err="failed to get container status \"a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777\": rpc error: code = NotFound desc = could not find container \"a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777\": container with ID starting with a2f6761629291c8b6853a107610ae8d470807335e62f3b7616c0b11877487777 not found: ID does not exist" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.104726 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-catalog-content\") pod \"d2e1f221-ce38-4433-82da-e115a924b6c3\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.104821 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-utilities\") pod \"d2e1f221-ce38-4433-82da-e115a924b6c3\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.104960 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pr6w\" (UniqueName: \"kubernetes.io/projected/d2e1f221-ce38-4433-82da-e115a924b6c3-kube-api-access-8pr6w\") pod \"d2e1f221-ce38-4433-82da-e115a924b6c3\" (UID: \"d2e1f221-ce38-4433-82da-e115a924b6c3\") " Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.106777 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-utilities" (OuterVolumeSpecName: "utilities") pod "d2e1f221-ce38-4433-82da-e115a924b6c3" (UID: "d2e1f221-ce38-4433-82da-e115a924b6c3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.110797 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2e1f221-ce38-4433-82da-e115a924b6c3-kube-api-access-8pr6w" (OuterVolumeSpecName: "kube-api-access-8pr6w") pod "d2e1f221-ce38-4433-82da-e115a924b6c3" (UID: "d2e1f221-ce38-4433-82da-e115a924b6c3"). InnerVolumeSpecName "kube-api-access-8pr6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.127795 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2e1f221-ce38-4433-82da-e115a924b6c3" (UID: "d2e1f221-ce38-4433-82da-e115a924b6c3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.207471 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.207514 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pr6w\" (UniqueName: \"kubernetes.io/projected/d2e1f221-ce38-4433-82da-e115a924b6c3-kube-api-access-8pr6w\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.207527 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2e1f221-ce38-4433-82da-e115a924b6c3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:44:54 crc kubenswrapper[5014]: I1205 11:44:54.976868 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcf2k" Dec 05 11:44:55 crc kubenswrapper[5014]: I1205 11:44:55.011352 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcf2k"] Dec 05 11:44:55 crc kubenswrapper[5014]: I1205 11:44:55.022073 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcf2k"] Dec 05 11:44:55 crc kubenswrapper[5014]: I1205 11:44:55.327759 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2e1f221-ce38-4433-82da-e115a924b6c3" path="/var/lib/kubelet/pods/d2e1f221-ce38-4433-82da-e115a924b6c3/volumes" Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.181066 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"] Dec 05 11:45:00 crc kubenswrapper[5014]: E1205 11:45:00.188389 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerName="registry-server" Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.188407 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerName="registry-server" Dec 05 11:45:00 crc kubenswrapper[5014]: E1205 11:45:00.188419 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerName="extract-utilities" Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.188427 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerName="extract-utilities" Dec 05 11:45:00 crc kubenswrapper[5014]: E1205 11:45:00.188444 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerName="extract-content" Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.188453 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerName="extract-content" Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.188631 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2e1f221-ce38-4433-82da-e115a924b6c3" containerName="registry-server" Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.189320 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.203444 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.203806 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.213722 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"]
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.323294 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-config-volume\") pod \"collect-profiles-29415585-xzpps\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.323364 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89ffk\" (UniqueName: \"kubernetes.io/projected/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-kube-api-access-89ffk\") pod \"collect-profiles-29415585-xzpps\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.323799 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-secret-volume\") pod \"collect-profiles-29415585-xzpps\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.425908 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-config-volume\") pod \"collect-profiles-29415585-xzpps\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.425969 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89ffk\" (UniqueName: \"kubernetes.io/projected/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-kube-api-access-89ffk\") pod \"collect-profiles-29415585-xzpps\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.425999 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-secret-volume\") pod \"collect-profiles-29415585-xzpps\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.427724 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-config-volume\") pod \"collect-profiles-29415585-xzpps\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.442092 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-secret-volume\") pod \"collect-profiles-29415585-xzpps\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.443006 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89ffk\" (UniqueName: \"kubernetes.io/projected/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-kube-api-access-89ffk\") pod \"collect-profiles-29415585-xzpps\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:00 crc kubenswrapper[5014]: I1205 11:45:00.521911 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:01 crc kubenswrapper[5014]: I1205 11:45:01.007825 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"]
Dec 05 11:45:01 crc kubenswrapper[5014]: W1205 11:45:01.011915 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3a1267c_8d0b_4a59_8e9e_f70bf7935062.slice/crio-c14701c77dcdbdc55e81360aa7011930038cda111b6fd64c7244142c14236cd9 WatchSource:0}: Error finding container c14701c77dcdbdc55e81360aa7011930038cda111b6fd64c7244142c14236cd9: Status 404 returned error can't find the container with id c14701c77dcdbdc55e81360aa7011930038cda111b6fd64c7244142c14236cd9
Dec 05 11:45:01 crc kubenswrapper[5014]: I1205 11:45:01.029774 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps" event={"ID":"f3a1267c-8d0b-4a59-8e9e-f70bf7935062","Type":"ContainerStarted","Data":"c14701c77dcdbdc55e81360aa7011930038cda111b6fd64c7244142c14236cd9"}
Dec 05 11:45:02 crc kubenswrapper[5014]: I1205 11:45:02.048516 5014 generic.go:334] "Generic (PLEG): container finished" podID="f3a1267c-8d0b-4a59-8e9e-f70bf7935062" containerID="7726626a6f33509bd1bef1e508491f3e04f7601292bf201b45d5a2af4a888621" exitCode=0
Dec 05 11:45:02 crc kubenswrapper[5014]: I1205 11:45:02.048568 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps" event={"ID":"f3a1267c-8d0b-4a59-8e9e-f70bf7935062","Type":"ContainerDied","Data":"7726626a6f33509bd1bef1e508491f3e04f7601292bf201b45d5a2af4a888621"}
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.326993 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99"
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.449123 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.620303 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-config-volume\") pod \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") "
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.620441 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-secret-volume\") pod \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") "
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.620523 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89ffk\" (UniqueName: \"kubernetes.io/projected/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-kube-api-access-89ffk\") pod \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\" (UID: \"f3a1267c-8d0b-4a59-8e9e-f70bf7935062\") "
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.621099 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-config-volume" (OuterVolumeSpecName: "config-volume") pod "f3a1267c-8d0b-4a59-8e9e-f70bf7935062" (UID: "f3a1267c-8d0b-4a59-8e9e-f70bf7935062"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.626538 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f3a1267c-8d0b-4a59-8e9e-f70bf7935062" (UID: "f3a1267c-8d0b-4a59-8e9e-f70bf7935062"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.627073 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-kube-api-access-89ffk" (OuterVolumeSpecName: "kube-api-access-89ffk") pod "f3a1267c-8d0b-4a59-8e9e-f70bf7935062" (UID: "f3a1267c-8d0b-4a59-8e9e-f70bf7935062"). InnerVolumeSpecName "kube-api-access-89ffk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.723372 5014 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.723435 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89ffk\" (UniqueName: \"kubernetes.io/projected/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-kube-api-access-89ffk\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:03 crc kubenswrapper[5014]: I1205 11:45:03.723449 5014 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f3a1267c-8d0b-4a59-8e9e-f70bf7935062-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 11:45:04 crc kubenswrapper[5014]: I1205 11:45:04.067918 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"3fd7243a4b55b7bf69db0d88cf03050765e9917d470638c153eb302c12f878be"}
Dec 05 11:45:04 crc kubenswrapper[5014]: I1205 11:45:04.070177 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps" event={"ID":"f3a1267c-8d0b-4a59-8e9e-f70bf7935062","Type":"ContainerDied","Data":"c14701c77dcdbdc55e81360aa7011930038cda111b6fd64c7244142c14236cd9"}
Dec 05 11:45:04 crc kubenswrapper[5014]: I1205 11:45:04.070203 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c14701c77dcdbdc55e81360aa7011930038cda111b6fd64c7244142c14236cd9"
Dec 05 11:45:04 crc kubenswrapper[5014]: I1205 11:45:04.070445 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415585-xzpps"
Dec 05 11:45:04 crc kubenswrapper[5014]: I1205 11:45:04.524204 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9"]
Dec 05 11:45:04 crc kubenswrapper[5014]: I1205 11:45:04.531319 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415540-s24l9"]
Dec 05 11:45:05 crc kubenswrapper[5014]: I1205 11:45:05.328418 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0" path="/var/lib/kubelet/pods/5e8077da-60bf-46a5-bc4c-f8eb17fbc6f0/volumes"
Dec 05 11:45:06 crc kubenswrapper[5014]: I1205 11:45:06.294320 5014 scope.go:117] "RemoveContainer" containerID="fbadf84fc634890a184599ae160a12e268c3bccbdf0001d964851c2b985f97e8"
Dec 05 11:46:34 crc kubenswrapper[5014]: I1205 11:46:34.786674 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-twbmg"]
Dec 05 11:46:34 crc kubenswrapper[5014]: E1205 11:46:34.790186 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3a1267c-8d0b-4a59-8e9e-f70bf7935062" containerName="collect-profiles"
Dec 05 11:46:34 crc kubenswrapper[5014]: I1205 11:46:34.790219 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3a1267c-8d0b-4a59-8e9e-f70bf7935062" containerName="collect-profiles"
Dec 05 11:46:34 crc kubenswrapper[5014]: I1205 11:46:34.790424 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3a1267c-8d0b-4a59-8e9e-f70bf7935062" containerName="collect-profiles"
Dec 05 11:46:34 crc kubenswrapper[5014]: I1205 11:46:34.792240 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:34 crc kubenswrapper[5014]: I1205 11:46:34.802437 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-twbmg"]
Dec 05 11:46:34 crc kubenswrapper[5014]: I1205 11:46:34.929168 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-catalog-content\") pod \"certified-operators-twbmg\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") " pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:34 crc kubenswrapper[5014]: I1205 11:46:34.929222 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-utilities\") pod \"certified-operators-twbmg\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") " pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:34 crc kubenswrapper[5014]: I1205 11:46:34.929349 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxqtq\" (UniqueName: \"kubernetes.io/projected/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-kube-api-access-sxqtq\") pod \"certified-operators-twbmg\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") " pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.030423 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-catalog-content\") pod \"certified-operators-twbmg\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") " pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.030711 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-utilities\") pod \"certified-operators-twbmg\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") " pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.030781 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxqtq\" (UniqueName: \"kubernetes.io/projected/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-kube-api-access-sxqtq\") pod \"certified-operators-twbmg\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") " pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.031141 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-catalog-content\") pod \"certified-operators-twbmg\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") " pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.031230 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-utilities\") pod \"certified-operators-twbmg\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") " pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.058517 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxqtq\" (UniqueName: \"kubernetes.io/projected/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-kube-api-access-sxqtq\") pod \"certified-operators-twbmg\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") " pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.119175 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.601733 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-twbmg"]
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.892187 5014 generic.go:334] "Generic (PLEG): container finished" podID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerID="8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15" exitCode=0
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.892229 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twbmg" event={"ID":"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101","Type":"ContainerDied","Data":"8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15"}
Dec 05 11:46:35 crc kubenswrapper[5014]: I1205 11:46:35.892292 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twbmg" event={"ID":"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101","Type":"ContainerStarted","Data":"b773913cc7b4c88fa5b6fe0f272a83f4a88c994c4c400099393397320c7cea34"}
Dec 05 11:46:36 crc kubenswrapper[5014]: I1205 11:46:36.903121 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twbmg" event={"ID":"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101","Type":"ContainerStarted","Data":"15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f"}
Dec 05 11:46:37 crc kubenswrapper[5014]: I1205 11:46:37.914588 5014 generic.go:334] "Generic (PLEG): container finished" podID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerID="15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f" exitCode=0
Dec 05 11:46:37 crc kubenswrapper[5014]: I1205 11:46:37.914684 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twbmg" event={"ID":"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101","Type":"ContainerDied","Data":"15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f"}
Dec 05 11:46:38 crc kubenswrapper[5014]: I1205 11:46:38.924493 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twbmg" event={"ID":"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101","Type":"ContainerStarted","Data":"fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021"}
Dec 05 11:46:38 crc kubenswrapper[5014]: I1205 11:46:38.948725 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-twbmg" podStartSLOduration=2.39290917 podStartE2EDuration="4.948700155s" podCreationTimestamp="2025-12-05 11:46:34 +0000 UTC" firstStartedPulling="2025-12-05 11:46:35.894308269 +0000 UTC m=+3522.842425973" lastFinishedPulling="2025-12-05 11:46:38.450099254 +0000 UTC m=+3525.398216958" observedRunningTime="2025-12-05 11:46:38.94563787 +0000 UTC m=+3525.893755594" watchObservedRunningTime="2025-12-05 11:46:38.948700155 +0000 UTC m=+3525.896817859"
Dec 05 11:46:45 crc kubenswrapper[5014]: I1205 11:46:45.120248 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:45 crc kubenswrapper[5014]: I1205 11:46:45.120760 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:45 crc kubenswrapper[5014]: I1205 11:46:45.171443 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:46 crc kubenswrapper[5014]: I1205 11:46:46.085265 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:46 crc kubenswrapper[5014]: I1205 11:46:46.139611 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-twbmg"]
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.045689 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-twbmg" podUID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerName="registry-server" containerID="cri-o://fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021" gracePeriod=2
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.559831 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.692483 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxqtq\" (UniqueName: \"kubernetes.io/projected/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-kube-api-access-sxqtq\") pod \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") "
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.692524 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-catalog-content\") pod \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") "
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.692648 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-utilities\") pod \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\" (UID: \"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101\") "
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.693721 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-utilities" (OuterVolumeSpecName: "utilities") pod "9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" (UID: "9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.698845 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-kube-api-access-sxqtq" (OuterVolumeSpecName: "kube-api-access-sxqtq") pod "9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" (UID: "9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101"). InnerVolumeSpecName "kube-api-access-sxqtq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.749462 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" (UID: "9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.796822 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.796864 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxqtq\" (UniqueName: \"kubernetes.io/projected/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-kube-api-access-sxqtq\") on node \"crc\" DevicePath \"\""
Dec 05 11:46:48 crc kubenswrapper[5014]: I1205 11:46:48.796880 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.054847 5014 generic.go:334] "Generic (PLEG): container finished" podID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerID="fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021" exitCode=0
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.054947 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-twbmg"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.054968 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twbmg" event={"ID":"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101","Type":"ContainerDied","Data":"fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021"}
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.055876 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twbmg" event={"ID":"9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101","Type":"ContainerDied","Data":"b773913cc7b4c88fa5b6fe0f272a83f4a88c994c4c400099393397320c7cea34"}
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.055901 5014 scope.go:117] "RemoveContainer" containerID="fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.079447 5014 scope.go:117] "RemoveContainer" containerID="15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.129078 5014 scope.go:117] "RemoveContainer" containerID="8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.137628 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-twbmg"]
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.167557 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-twbmg"]
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.230583 5014 scope.go:117] "RemoveContainer" containerID="fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021"
Dec 05 11:46:49 crc kubenswrapper[5014]: E1205 11:46:49.235385 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021\": container with ID starting with fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021 not found: ID does not exist" containerID="fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.235426 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021"} err="failed to get container status \"fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021\": rpc error: code = NotFound desc = could not find container \"fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021\": container with ID starting with fef981f98a8f681a285dc47f4723252a57420c98d6200e763b88c82460729021 not found: ID does not exist"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.235449 5014 scope.go:117] "RemoveContainer" containerID="15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f"
Dec 05 11:46:49 crc kubenswrapper[5014]: E1205 11:46:49.238177 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f\": container with ID starting with 15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f not found: ID does not exist" containerID="15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.238203 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f"} err="failed to get container status \"15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f\": rpc error: code = NotFound desc = could not find container \"15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f\": container with ID starting with 15a9199f6370fe3b4e3931ab91e70f988070872a21e7ba69cbdf3c4c4fd8478f not found: ID does not exist"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.238219 5014 scope.go:117] "RemoveContainer" containerID="8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15"
Dec 05 11:46:49 crc kubenswrapper[5014]: E1205 11:46:49.238816 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15\": container with ID starting with 8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15 not found: ID does not exist" containerID="8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.238859 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15"} err="failed to get container status \"8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15\": rpc error: code = NotFound desc = could not find container \"8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15\": container with ID starting with 8c19710541f2e2e0c9811ca3ebd70b739d1584486c2a17e28eff94085a027c15 not found: ID does not exist"
Dec 05 11:46:49 crc kubenswrapper[5014]: I1205 11:46:49.329359 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" path="/var/lib/kubelet/pods/9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101/volumes"
Dec 05 11:47:32 crc kubenswrapper[5014]: I1205 11:47:32.936359 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:47:32 crc kubenswrapper[5014]: I1205 11:47:32.936888 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:48:02 crc kubenswrapper[5014]: I1205 11:48:02.936517 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:48:02 crc kubenswrapper[5014]: I1205 11:48:02.937057 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:48:32 crc kubenswrapper[5014]: I1205 11:48:32.937223 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:48:32 crc kubenswrapper[5014]: I1205 11:48:32.938032 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:48:32 crc kubenswrapper[5014]: I1205 11:48:32.938113 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5"
Dec 05 11:48:32 crc kubenswrapper[5014]: I1205 11:48:32.939466 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3fd7243a4b55b7bf69db0d88cf03050765e9917d470638c153eb302c12f878be"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 11:48:32 crc kubenswrapper[5014]: I1205 11:48:32.939597 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://3fd7243a4b55b7bf69db0d88cf03050765e9917d470638c153eb302c12f878be" gracePeriod=600
Dec 05 11:48:34 crc kubenswrapper[5014]: I1205 11:48:34.699289 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="3fd7243a4b55b7bf69db0d88cf03050765e9917d470638c153eb302c12f878be" exitCode=0
Dec 05 11:48:34 crc kubenswrapper[5014]: I1205 11:48:34.699379 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"3fd7243a4b55b7bf69db0d88cf03050765e9917d470638c153eb302c12f878be"}
Dec 05 11:48:34 crc kubenswrapper[5014]: I1205 11:48:34.699897 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35"}
Dec 05 11:48:34 crc kubenswrapper[5014]: I1205 11:48:34.699921 5014 scope.go:117] "RemoveContainer" containerID="b130586d83d62a054e4ef61beff71e229c3c4c62da0b9a6ebc15a6f0be6dae99"
Dec 05 11:50:28 crc kubenswrapper[5014]: I1205 11:50:28.887395 5014 generic.go:334] "Generic (PLEG): container finished" podID="3f886993-57e9-4023-8186-8fbdeb4fe04c" containerID="10e5b986139e32dc80abbf4951e8a804d1d45c53870158f7e317a60c78e0f921" exitCode=0
Dec 05 11:50:28 crc kubenswrapper[5014]: I1205 11:50:28.887480 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3f886993-57e9-4023-8186-8fbdeb4fe04c","Type":"ContainerDied","Data":"10e5b986139e32dc80abbf4951e8a804d1d45c53870158f7e317a60c78e0f921"}
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.254339 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.362116 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-config-data\") pod \"3f886993-57e9-4023-8186-8fbdeb4fe04c\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.362234 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ca-certs\") pod \"3f886993-57e9-4023-8186-8fbdeb4fe04c\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.362372 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config-secret\") pod \"3f886993-57e9-4023-8186-8fbdeb4fe04c\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.362414 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"3f886993-57e9-4023-8186-8fbdeb4fe04c\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.362494 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-temporary\") pod \"3f886993-57e9-4023-8186-8fbdeb4fe04c\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.362529 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ssh-key\") pod \"3f886993-57e9-4023-8186-8fbdeb4fe04c\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.362551 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf5l5\" (UniqueName: \"kubernetes.io/projected/3f886993-57e9-4023-8186-8fbdeb4fe04c-kube-api-access-mf5l5\") pod \"3f886993-57e9-4023-8186-8fbdeb4fe04c\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.362619 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-workdir\") pod \"3f886993-57e9-4023-8186-8fbdeb4fe04c\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.362651 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config\") pod \"3f886993-57e9-4023-8186-8fbdeb4fe04c\" (UID: \"3f886993-57e9-4023-8186-8fbdeb4fe04c\") "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.363197 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "3f886993-57e9-4023-8186-8fbdeb4fe04c" (UID: "3f886993-57e9-4023-8186-8fbdeb4fe04c"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.363778 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-config-data" (OuterVolumeSpecName: "config-data") pod "3f886993-57e9-4023-8186-8fbdeb4fe04c" (UID: "3f886993-57e9-4023-8186-8fbdeb4fe04c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.367137 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "3f886993-57e9-4023-8186-8fbdeb4fe04c" (UID: "3f886993-57e9-4023-8186-8fbdeb4fe04c"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.368699 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "test-operator-logs") pod "3f886993-57e9-4023-8186-8fbdeb4fe04c" (UID: "3f886993-57e9-4023-8186-8fbdeb4fe04c"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.369360 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f886993-57e9-4023-8186-8fbdeb4fe04c-kube-api-access-mf5l5" (OuterVolumeSpecName: "kube-api-access-mf5l5") pod "3f886993-57e9-4023-8186-8fbdeb4fe04c" (UID: "3f886993-57e9-4023-8186-8fbdeb4fe04c"). InnerVolumeSpecName "kube-api-access-mf5l5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.391610 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "3f886993-57e9-4023-8186-8fbdeb4fe04c" (UID: "3f886993-57e9-4023-8186-8fbdeb4fe04c"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.402613 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3f886993-57e9-4023-8186-8fbdeb4fe04c" (UID: "3f886993-57e9-4023-8186-8fbdeb4fe04c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.404151 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "3f886993-57e9-4023-8186-8fbdeb4fe04c" (UID: "3f886993-57e9-4023-8186-8fbdeb4fe04c"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.411004 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "3f886993-57e9-4023-8186-8fbdeb4fe04c" (UID: "3f886993-57e9-4023-8186-8fbdeb4fe04c"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.465095 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.465122 5014 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ca-certs\") on node \"crc\" DevicePath \"\""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.465130 5014 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.465958 5014 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" "
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.466171 5014 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.466186 5014 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f886993-57e9-4023-8186-8fbdeb4fe04c-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.466198 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf5l5\" (UniqueName: \"kubernetes.io/projected/3f886993-57e9-4023-8186-8fbdeb4fe04c-kube-api-access-mf5l5\") on node \"crc\" DevicePath \"\""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.466285 5014 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3f886993-57e9-4023-8186-8fbdeb4fe04c-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.466327 5014 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3f886993-57e9-4023-8186-8fbdeb4fe04c-openstack-config\") on node \"crc\" DevicePath \"\""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.486677 5014 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc"
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.568296 5014 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\""
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.907549 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3f886993-57e9-4023-8186-8fbdeb4fe04c","Type":"ContainerDied","Data":"5532021ed9e3a653abb213135604034f96a2b83a1bac15060d63e329eea5973e"}
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.907592 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5532021ed9e3a653abb213135604034f96a2b83a1bac15060d63e329eea5973e"
Dec 05 11:50:30 crc kubenswrapper[5014]: I1205 11:50:30.907717 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 11:50:31 crc kubenswrapper[5014]: E1205 11:50:31.008881 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f886993_57e9_4023_8186_8fbdeb4fe04c.slice/crio-5532021ed9e3a653abb213135604034f96a2b83a1bac15060d63e329eea5973e\": RecentStats: unable to find data in memory cache]"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.743789 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 05 11:50:42 crc kubenswrapper[5014]: E1205 11:50:42.745351 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerName="extract-utilities"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.745382 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerName="extract-utilities"
Dec 05 11:50:42 crc kubenswrapper[5014]: E1205 11:50:42.745452 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f886993-57e9-4023-8186-8fbdeb4fe04c" containerName="tempest-tests-tempest-tests-runner"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.745470 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f886993-57e9-4023-8186-8fbdeb4fe04c" containerName="tempest-tests-tempest-tests-runner"
Dec 05 11:50:42 crc kubenswrapper[5014]: E1205 11:50:42.745500 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerName="registry-server"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.745518 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerName="registry-server"
Dec 05 11:50:42 crc kubenswrapper[5014]: E1205 11:50:42.745553 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerName="extract-content"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.745570 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerName="extract-content"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.746012 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f886993-57e9-4023-8186-8fbdeb4fe04c" containerName="tempest-tests-tempest-tests-runner"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.746079 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fe3fd9f-fe61-4a8b-bcf5-7ee86192a101" containerName="registry-server"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.747439 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.749780 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-5rq69"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.769960 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.828403 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pb2n\" (UniqueName: \"kubernetes.io/projected/9593349e-2888-4068-9ec8-9b7c4a154a9e-kube-api-access-2pb2n\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9593349e-2888-4068-9ec8-9b7c4a154a9e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.828605 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9593349e-2888-4068-9ec8-9b7c4a154a9e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.930788 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9593349e-2888-4068-9ec8-9b7c4a154a9e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.930922 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pb2n\" (UniqueName: \"kubernetes.io/projected/9593349e-2888-4068-9ec8-9b7c4a154a9e-kube-api-access-2pb2n\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9593349e-2888-4068-9ec8-9b7c4a154a9e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.931262 5014 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9593349e-2888-4068-9ec8-9b7c4a154a9e\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.949810 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pb2n\" (UniqueName: \"kubernetes.io/projected/9593349e-2888-4068-9ec8-9b7c4a154a9e-kube-api-access-2pb2n\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9593349e-2888-4068-9ec8-9b7c4a154a9e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 11:50:42 crc kubenswrapper[5014]: I1205 11:50:42.966024 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9593349e-2888-4068-9ec8-9b7c4a154a9e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 11:50:43 crc kubenswrapper[5014]: I1205 11:50:43.082343 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 11:50:43 crc kubenswrapper[5014]: I1205 11:50:43.524163 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 05 11:50:43 crc kubenswrapper[5014]: I1205 11:50:43.531291 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 11:50:44 crc kubenswrapper[5014]: I1205 11:50:44.062661 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"9593349e-2888-4068-9ec8-9b7c4a154a9e","Type":"ContainerStarted","Data":"7dd6e7ecf20236e19ca88bd63e08611e85f22641f1be96d9521b79e87ac01d0d"}
Dec 05 11:50:45 crc kubenswrapper[5014]: I1205 11:50:45.077370 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"9593349e-2888-4068-9ec8-9b7c4a154a9e","Type":"ContainerStarted","Data":"515fcac461e40c3e93ddebc5dea46199995b72f1d2b3de69ec7606170519dd8a"}
Dec 05 11:50:45 crc kubenswrapper[5014]: I1205 11:50:45.108968 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.256731079 podStartE2EDuration="3.108949854s" podCreationTimestamp="2025-12-05 11:50:42 +0000 UTC" firstStartedPulling="2025-12-05 11:50:43.530997821 +0000 UTC m=+3770.479115525" lastFinishedPulling="2025-12-05 11:50:44.383216596 +0000 UTC m=+3771.331334300" observedRunningTime="2025-12-05 11:50:45.093356333 +0000 UTC m=+3772.041474077" watchObservedRunningTime="2025-12-05 11:50:45.108949854 +0000 UTC m=+3772.057067568"
Dec 05 11:51:02 crc kubenswrapper[5014]: I1205 11:51:02.936888 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:51:02 crc kubenswrapper[5014]: I1205 11:51:02.937695 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.155915 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rwdzw/must-gather-8c7pj"]
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.158493 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/must-gather-8c7pj"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.164182 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-rwdzw"/"openshift-service-ca.crt"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.167345 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-rwdzw"/"kube-root-ca.crt"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.179661 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-rwdzw/must-gather-8c7pj"]
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.258871 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-must-gather-output\") pod \"must-gather-8c7pj\" (UID: \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\") " pod="openshift-must-gather-rwdzw/must-gather-8c7pj"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.258953 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwnqn\" (UniqueName: \"kubernetes.io/projected/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-kube-api-access-rwnqn\") pod \"must-gather-8c7pj\" (UID: \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\") " pod="openshift-must-gather-rwdzw/must-gather-8c7pj"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.361237 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwnqn\" (UniqueName: \"kubernetes.io/projected/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-kube-api-access-rwnqn\") pod \"must-gather-8c7pj\" (UID: \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\") " pod="openshift-must-gather-rwdzw/must-gather-8c7pj"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.361503 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-must-gather-output\") pod \"must-gather-8c7pj\" (UID: \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\") " pod="openshift-must-gather-rwdzw/must-gather-8c7pj"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.362046 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-must-gather-output\") pod \"must-gather-8c7pj\" (UID: \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\") " pod="openshift-must-gather-rwdzw/must-gather-8c7pj"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.383200 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwnqn\" (UniqueName: \"kubernetes.io/projected/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-kube-api-access-rwnqn\") pod \"must-gather-8c7pj\" (UID: \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\") " pod="openshift-must-gather-rwdzw/must-gather-8c7pj"
Dec 05 11:51:10 crc kubenswrapper[5014]: I1205 11:51:10.494627 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/must-gather-8c7pj"
Dec 05 11:51:11 crc kubenswrapper[5014]: I1205 11:51:11.052080 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-rwdzw/must-gather-8c7pj"]
Dec 05 11:51:11 crc kubenswrapper[5014]: I1205 11:51:11.344290 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/must-gather-8c7pj" event={"ID":"37b6e3fb-a68e-4c8b-8472-cce8a2c29449","Type":"ContainerStarted","Data":"6dc079dd14fdfdbed43e16a2da1ce6f23c45b0066623f3358494b01b48b46a97"}
Dec 05 11:51:17 crc kubenswrapper[5014]: I1205 11:51:17.426123 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/must-gather-8c7pj" event={"ID":"37b6e3fb-a68e-4c8b-8472-cce8a2c29449","Type":"ContainerStarted","Data":"45eae07d8ebf624e5a9f632e073b3e87f7554140ea8fcb706553c0a911989b58"}
Dec 05 11:51:17 crc kubenswrapper[5014]: I1205 11:51:17.426701 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/must-gather-8c7pj" event={"ID":"37b6e3fb-a68e-4c8b-8472-cce8a2c29449","Type":"ContainerStarted","Data":"b8134ced2afb7c58666fdc550ef86ad1ecec84cf848d0ef5f20b1bada58997ab"}
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.263138 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rwdzw/must-gather-8c7pj" podStartSLOduration=4.844662809 podStartE2EDuration="10.263117761s" podCreationTimestamp="2025-12-05 11:51:10 +0000 UTC" firstStartedPulling="2025-12-05 11:51:11.055929378 +0000 UTC m=+3798.004047082" lastFinishedPulling="2025-12-05 11:51:16.47438433 +0000 UTC m=+3803.422502034" observedRunningTime="2025-12-05 11:51:17.443897587 +0000 UTC m=+3804.392015301" watchObservedRunningTime="2025-12-05 11:51:20.263117761 +0000 UTC m=+3807.211235475"
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.272576 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rwdzw/crc-debug-b7jfq"]
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.274058 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq"
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.276034 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-rwdzw"/"default-dockercfg-t8f45"
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.351809 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cdb2\" (UniqueName: \"kubernetes.io/projected/032ef6bd-035f-45ea-ac73-0b5b80585704-kube-api-access-8cdb2\") pod \"crc-debug-b7jfq\" (UID: \"032ef6bd-035f-45ea-ac73-0b5b80585704\") " pod="openshift-must-gather-rwdzw/crc-debug-b7jfq"
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.351931 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/032ef6bd-035f-45ea-ac73-0b5b80585704-host\") pod \"crc-debug-b7jfq\" (UID: \"032ef6bd-035f-45ea-ac73-0b5b80585704\") " pod="openshift-must-gather-rwdzw/crc-debug-b7jfq"
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.453637 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cdb2\" (UniqueName: \"kubernetes.io/projected/032ef6bd-035f-45ea-ac73-0b5b80585704-kube-api-access-8cdb2\") pod \"crc-debug-b7jfq\" (UID: \"032ef6bd-035f-45ea-ac73-0b5b80585704\") " pod="openshift-must-gather-rwdzw/crc-debug-b7jfq"
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.453779 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/032ef6bd-035f-45ea-ac73-0b5b80585704-host\") pod \"crc-debug-b7jfq\" (UID: \"032ef6bd-035f-45ea-ac73-0b5b80585704\") " pod="openshift-must-gather-rwdzw/crc-debug-b7jfq"
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.454699 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/032ef6bd-035f-45ea-ac73-0b5b80585704-host\") pod \"crc-debug-b7jfq\" (UID: \"032ef6bd-035f-45ea-ac73-0b5b80585704\") " pod="openshift-must-gather-rwdzw/crc-debug-b7jfq"
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.476997 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cdb2\" (UniqueName: \"kubernetes.io/projected/032ef6bd-035f-45ea-ac73-0b5b80585704-kube-api-access-8cdb2\") pod \"crc-debug-b7jfq\" (UID: \"032ef6bd-035f-45ea-ac73-0b5b80585704\") " pod="openshift-must-gather-rwdzw/crc-debug-b7jfq"
Dec 05 11:51:20 crc kubenswrapper[5014]: I1205 11:51:20.592100 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq"
Dec 05 11:51:21 crc kubenswrapper[5014]: I1205 11:51:21.473850 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq" event={"ID":"032ef6bd-035f-45ea-ac73-0b5b80585704","Type":"ContainerStarted","Data":"a6f0b023162a93761c5c20654957d7a6b647b97fdf5fe066d12a41401fd2be9b"}
Dec 05 11:51:32 crc kubenswrapper[5014]: I1205 11:51:32.937296 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:51:32 crc kubenswrapper[5014]: I1205 11:51:32.938352 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 11:51:39 crc kubenswrapper[5014]: E1205 11:51:39.679069 5014 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296"
Dec 05 11:51:39 crc kubenswrapper[5014]: E1205 11:51:39.679837 5014 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:container-00,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296,Command:[chroot /host bash -c echo 'TOOLBOX_NAME=toolbox-osp' > /root/.toolboxrc ; rm -rf \"/var/tmp/sos-osp\" && mkdir -p \"/var/tmp/sos-osp\" && sudo podman rm --force toolbox-osp; sudo --preserve-env podman pull --authfile /var/lib/kubelet/config.json registry.redhat.io/rhel9/support-tools && toolbox sos report --batch --all-logs --only-plugins block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto --tmp-dir=\"/var/tmp/sos-osp\" && if [[ \"$(ls /var/log/pods/*/{*.log.*,*/*.log.*} 2>/dev/null)\" != '' ]]; then tar --ignore-failed-read --warning=no-file-changed -cJf \"/var/tmp/sos-osp/podlogs.tar.xz\" --transform 's,^,podlogs/,' /var/log/pods/*/{*.log.*,*/*.log.*} || true; fi],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:TMOUT,Value:900,ValueFrom:nil,},EnvVar{Name:HOST,Value:/host,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host,ReadOnly:false,MountPath:/host,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8cdb2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod crc-debug-b7jfq_openshift-must-gather-rwdzw(032ef6bd-035f-45ea-ac73-0b5b80585704): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 11:51:39 crc kubenswrapper[5014]: E1205 11:51:39.681068 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq" podUID="032ef6bd-035f-45ea-ac73-0b5b80585704"
Dec 05 11:51:40 crc kubenswrapper[5014]: E1205 11:51:40.644720 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296\\\"\"" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq" podUID="032ef6bd-035f-45ea-ac73-0b5b80585704"
Dec 05 11:51:57 crc kubenswrapper[5014]: I1205 11:51:57.796424 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq" event={"ID":"032ef6bd-035f-45ea-ac73-0b5b80585704","Type":"ContainerStarted","Data":"ea5e705e09a6ed1701d13ebe302efe0ece796cdcdad7ac93e003fc8430de371b"}
Dec 05 11:51:57 crc kubenswrapper[5014]: I1205 11:51:57.842317 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq" podStartSLOduration=1.675214215 podStartE2EDuration="37.84229876s" podCreationTimestamp="2025-12-05 11:51:20 +0000 UTC" firstStartedPulling="2025-12-05 11:51:20.621844104 +0000 UTC m=+3807.569961808" lastFinishedPulling="2025-12-05 11:51:56.788928609 +0000 UTC m=+3843.737046353" observedRunningTime="2025-12-05 11:51:57.835112343 +0000 UTC m=+3844.783230067" watchObservedRunningTime="2025-12-05 11:51:57.84229876 +0000 UTC m=+3844.790416464"
Dec 05 11:52:02 crc kubenswrapper[5014]: I1205 11:52:02.937321 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 11:52:02 crc kubenswrapper[5014]: I1205 11:52:02.938728
5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 11:52:02 crc kubenswrapper[5014]: I1205 11:52:02.938846 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 11:52:02 crc kubenswrapper[5014]: I1205 11:52:02.939611 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 11:52:02 crc kubenswrapper[5014]: I1205 11:52:02.939741 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" gracePeriod=600 Dec 05 11:52:04 crc kubenswrapper[5014]: I1205 11:52:04.040930 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" exitCode=0 Dec 05 11:52:04 crc kubenswrapper[5014]: I1205 11:52:04.041012 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35"} Dec 05 11:52:04 crc kubenswrapper[5014]: I1205 11:52:04.042131 5014 scope.go:117] "RemoveContainer" containerID="3fd7243a4b55b7bf69db0d88cf03050765e9917d470638c153eb302c12f878be" Dec 05 11:52:04 crc kubenswrapper[5014]: E1205 11:52:04.181799 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:52:05 crc kubenswrapper[5014]: I1205 11:52:05.063591 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:52:05 crc kubenswrapper[5014]: E1205 11:52:05.064184 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:52:17 crc kubenswrapper[5014]: I1205 11:52:17.319117 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:52:17 crc kubenswrapper[5014]: E1205 11:52:17.320120 5014 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:52:28 crc kubenswrapper[5014]: I1205 11:52:28.319892 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:52:28 crc kubenswrapper[5014]: E1205 11:52:28.321330 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:52:39 crc kubenswrapper[5014]: I1205 11:52:39.320757 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:52:39 crc kubenswrapper[5014]: E1205 11:52:39.323391 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:52:42 crc kubenswrapper[5014]: I1205 11:52:42.433631 5014 generic.go:334] "Generic (PLEG): container finished" podID="032ef6bd-035f-45ea-ac73-0b5b80585704" containerID="ea5e705e09a6ed1701d13ebe302efe0ece796cdcdad7ac93e003fc8430de371b" exitCode=0 Dec 05 11:52:42 crc kubenswrapper[5014]: I1205 11:52:42.433751 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq" event={"ID":"032ef6bd-035f-45ea-ac73-0b5b80585704","Type":"ContainerDied","Data":"ea5e705e09a6ed1701d13ebe302efe0ece796cdcdad7ac93e003fc8430de371b"} Dec 05 11:52:43 crc kubenswrapper[5014]: I1205 11:52:43.553438 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq" Dec 05 11:52:43 crc kubenswrapper[5014]: I1205 11:52:43.587065 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rwdzw/crc-debug-b7jfq"] Dec 05 11:52:43 crc kubenswrapper[5014]: I1205 11:52:43.596004 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rwdzw/crc-debug-b7jfq"] Dec 05 11:52:43 crc kubenswrapper[5014]: I1205 11:52:43.636245 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cdb2\" (UniqueName: \"kubernetes.io/projected/032ef6bd-035f-45ea-ac73-0b5b80585704-kube-api-access-8cdb2\") pod \"032ef6bd-035f-45ea-ac73-0b5b80585704\" (UID: \"032ef6bd-035f-45ea-ac73-0b5b80585704\") " Dec 05 11:52:43 crc kubenswrapper[5014]: I1205 11:52:43.636391 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/032ef6bd-035f-45ea-ac73-0b5b80585704-host\") pod \"032ef6bd-035f-45ea-ac73-0b5b80585704\" (UID: \"032ef6bd-035f-45ea-ac73-0b5b80585704\") " Dec 05 11:52:43 crc kubenswrapper[5014]: I1205 11:52:43.636526 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/032ef6bd-035f-45ea-ac73-0b5b80585704-host" (OuterVolumeSpecName: "host") pod "032ef6bd-035f-45ea-ac73-0b5b80585704" (UID: "032ef6bd-035f-45ea-ac73-0b5b80585704"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:52:43 crc kubenswrapper[5014]: I1205 11:52:43.637019 5014 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/032ef6bd-035f-45ea-ac73-0b5b80585704-host\") on node \"crc\" DevicePath \"\"" Dec 05 11:52:43 crc kubenswrapper[5014]: I1205 11:52:43.644248 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/032ef6bd-035f-45ea-ac73-0b5b80585704-kube-api-access-8cdb2" (OuterVolumeSpecName: "kube-api-access-8cdb2") pod "032ef6bd-035f-45ea-ac73-0b5b80585704" (UID: "032ef6bd-035f-45ea-ac73-0b5b80585704"). InnerVolumeSpecName "kube-api-access-8cdb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:52:43 crc kubenswrapper[5014]: I1205 11:52:43.739179 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cdb2\" (UniqueName: \"kubernetes.io/projected/032ef6bd-035f-45ea-ac73-0b5b80585704-kube-api-access-8cdb2\") on node \"crc\" DevicePath \"\"" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.452435 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6f0b023162a93761c5c20654957d7a6b647b97fdf5fe066d12a41401fd2be9b" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.452506 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-b7jfq" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.778708 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rwdzw/crc-debug-g22wb"] Dec 05 11:52:44 crc kubenswrapper[5014]: E1205 11:52:44.779553 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="032ef6bd-035f-45ea-ac73-0b5b80585704" containerName="container-00" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.779566 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="032ef6bd-035f-45ea-ac73-0b5b80585704" containerName="container-00" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.779722 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="032ef6bd-035f-45ea-ac73-0b5b80585704" containerName="container-00" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.780324 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.790406 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-rwdzw"/"default-dockercfg-t8f45" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.867290 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ft5f\" (UniqueName: \"kubernetes.io/projected/407496be-9006-4780-a3f7-8684fef5b17b-kube-api-access-6ft5f\") pod \"crc-debug-g22wb\" (UID: \"407496be-9006-4780-a3f7-8684fef5b17b\") " pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.867406 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/407496be-9006-4780-a3f7-8684fef5b17b-host\") pod \"crc-debug-g22wb\" (UID: \"407496be-9006-4780-a3f7-8684fef5b17b\") " pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.968753 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ft5f\" (UniqueName: \"kubernetes.io/projected/407496be-9006-4780-a3f7-8684fef5b17b-kube-api-access-6ft5f\") pod \"crc-debug-g22wb\" (UID: \"407496be-9006-4780-a3f7-8684fef5b17b\") " pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.968852 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/407496be-9006-4780-a3f7-8684fef5b17b-host\") pod \"crc-debug-g22wb\" (UID: \"407496be-9006-4780-a3f7-8684fef5b17b\") " pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.969072 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/407496be-9006-4780-a3f7-8684fef5b17b-host\") pod \"crc-debug-g22wb\" (UID: \"407496be-9006-4780-a3f7-8684fef5b17b\") " pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:44 crc kubenswrapper[5014]: I1205 11:52:44.992229 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ft5f\" (UniqueName: \"kubernetes.io/projected/407496be-9006-4780-a3f7-8684fef5b17b-kube-api-access-6ft5f\") pod \"crc-debug-g22wb\" (UID: \"407496be-9006-4780-a3f7-8684fef5b17b\") " pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:45 crc kubenswrapper[5014]: I1205 
11:52:45.102566 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:45 crc kubenswrapper[5014]: I1205 11:52:45.337898 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="032ef6bd-035f-45ea-ac73-0b5b80585704" path="/var/lib/kubelet/pods/032ef6bd-035f-45ea-ac73-0b5b80585704/volumes" Dec 05 11:52:45 crc kubenswrapper[5014]: I1205 11:52:45.461667 5014 generic.go:334] "Generic (PLEG): container finished" podID="407496be-9006-4780-a3f7-8684fef5b17b" containerID="eaf5a2d6bdd2eb88e6e14b5705da358d096ec121923e89a29f8ec635c61a913e" exitCode=0 Dec 05 11:52:45 crc kubenswrapper[5014]: I1205 11:52:45.461715 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/crc-debug-g22wb" event={"ID":"407496be-9006-4780-a3f7-8684fef5b17b","Type":"ContainerDied","Data":"eaf5a2d6bdd2eb88e6e14b5705da358d096ec121923e89a29f8ec635c61a913e"} Dec 05 11:52:45 crc kubenswrapper[5014]: I1205 11:52:45.461745 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/crc-debug-g22wb" event={"ID":"407496be-9006-4780-a3f7-8684fef5b17b","Type":"ContainerStarted","Data":"cd4ccdd4bbd3b7242aa72dbd889c00dbcb9acec4a46cbf2bb55e103f07cc1e41"} Dec 05 11:52:46 crc kubenswrapper[5014]: I1205 11:52:46.006224 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rwdzw/crc-debug-g22wb"] Dec 05 11:52:46 crc kubenswrapper[5014]: I1205 11:52:46.013748 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rwdzw/crc-debug-g22wb"] Dec 05 11:52:46 crc kubenswrapper[5014]: I1205 11:52:46.591770 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:46 crc kubenswrapper[5014]: I1205 11:52:46.697188 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/407496be-9006-4780-a3f7-8684fef5b17b-host\") pod \"407496be-9006-4780-a3f7-8684fef5b17b\" (UID: \"407496be-9006-4780-a3f7-8684fef5b17b\") " Dec 05 11:52:46 crc kubenswrapper[5014]: I1205 11:52:46.697393 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ft5f\" (UniqueName: \"kubernetes.io/projected/407496be-9006-4780-a3f7-8684fef5b17b-kube-api-access-6ft5f\") pod \"407496be-9006-4780-a3f7-8684fef5b17b\" (UID: \"407496be-9006-4780-a3f7-8684fef5b17b\") " Dec 05 11:52:46 crc kubenswrapper[5014]: I1205 11:52:46.697467 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/407496be-9006-4780-a3f7-8684fef5b17b-host" (OuterVolumeSpecName: "host") pod "407496be-9006-4780-a3f7-8684fef5b17b" (UID: "407496be-9006-4780-a3f7-8684fef5b17b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:52:46 crc kubenswrapper[5014]: I1205 11:52:46.697990 5014 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/407496be-9006-4780-a3f7-8684fef5b17b-host\") on node \"crc\" DevicePath \"\"" Dec 05 11:52:46 crc kubenswrapper[5014]: I1205 11:52:46.702737 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/407496be-9006-4780-a3f7-8684fef5b17b-kube-api-access-6ft5f" (OuterVolumeSpecName: "kube-api-access-6ft5f") pod "407496be-9006-4780-a3f7-8684fef5b17b" (UID: "407496be-9006-4780-a3f7-8684fef5b17b"). 
InnerVolumeSpecName "kube-api-access-6ft5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:52:46 crc kubenswrapper[5014]: I1205 11:52:46.800080 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ft5f\" (UniqueName: \"kubernetes.io/projected/407496be-9006-4780-a3f7-8684fef5b17b-kube-api-access-6ft5f\") on node \"crc\" DevicePath \"\"" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.181357 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rwdzw/crc-debug-l9x6w"] Dec 05 11:52:47 crc kubenswrapper[5014]: E1205 11:52:47.182189 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="407496be-9006-4780-a3f7-8684fef5b17b" containerName="container-00" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.182248 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="407496be-9006-4780-a3f7-8684fef5b17b" containerName="container-00" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.182838 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="407496be-9006-4780-a3f7-8684fef5b17b" containerName="container-00" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.184025 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.309080 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6ww2\" (UniqueName: \"kubernetes.io/projected/f6e87f45-a650-408a-8453-ca47feba3592-kube-api-access-m6ww2\") pod \"crc-debug-l9x6w\" (UID: \"f6e87f45-a650-408a-8453-ca47feba3592\") " pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.309149 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f6e87f45-a650-408a-8453-ca47feba3592-host\") pod \"crc-debug-l9x6w\" (UID: \"f6e87f45-a650-408a-8453-ca47feba3592\") " pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.335562 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="407496be-9006-4780-a3f7-8684fef5b17b" path="/var/lib/kubelet/pods/407496be-9006-4780-a3f7-8684fef5b17b/volumes" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.411871 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6ww2\" (UniqueName: \"kubernetes.io/projected/f6e87f45-a650-408a-8453-ca47feba3592-kube-api-access-m6ww2\") pod \"crc-debug-l9x6w\" (UID: \"f6e87f45-a650-408a-8453-ca47feba3592\") " pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.411935 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f6e87f45-a650-408a-8453-ca47feba3592-host\") pod \"crc-debug-l9x6w\" (UID: \"f6e87f45-a650-408a-8453-ca47feba3592\") " pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.412048 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f6e87f45-a650-408a-8453-ca47feba3592-host\") pod \"crc-debug-l9x6w\" (UID: \"f6e87f45-a650-408a-8453-ca47feba3592\") " pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.430534 5014 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6ww2\" (UniqueName: \"kubernetes.io/projected/f6e87f45-a650-408a-8453-ca47feba3592-kube-api-access-m6ww2\") pod \"crc-debug-l9x6w\" (UID: \"f6e87f45-a650-408a-8453-ca47feba3592\") " pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.485499 5014 scope.go:117] "RemoveContainer" containerID="eaf5a2d6bdd2eb88e6e14b5705da358d096ec121923e89a29f8ec635c61a913e" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.485552 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-g22wb" Dec 05 11:52:47 crc kubenswrapper[5014]: I1205 11:52:47.516149 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:47 crc kubenswrapper[5014]: W1205 11:52:47.539952 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6e87f45_a650_408a_8453_ca47feba3592.slice/crio-8288ea0f465821075506d667cdcd5f9d969b96f50c1e38faf87e7237c6580c3b WatchSource:0}: Error finding container 8288ea0f465821075506d667cdcd5f9d969b96f50c1e38faf87e7237c6580c3b: Status 404 returned error can't find the container with id 8288ea0f465821075506d667cdcd5f9d969b96f50c1e38faf87e7237c6580c3b Dec 05 11:52:48 crc kubenswrapper[5014]: I1205 11:52:48.495128 5014 generic.go:334] "Generic (PLEG): container finished" podID="f6e87f45-a650-408a-8453-ca47feba3592" containerID="c188a1d9a87d460e7e20a1b61d5955a2f19fe4119ebea95147bd96c0c13cdc8a" exitCode=0 Dec 05 11:52:48 crc kubenswrapper[5014]: I1205 11:52:48.495223 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" event={"ID":"f6e87f45-a650-408a-8453-ca47feba3592","Type":"ContainerDied","Data":"c188a1d9a87d460e7e20a1b61d5955a2f19fe4119ebea95147bd96c0c13cdc8a"} Dec 05 11:52:48 crc kubenswrapper[5014]: I1205 11:52:48.495542 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" event={"ID":"f6e87f45-a650-408a-8453-ca47feba3592","Type":"ContainerStarted","Data":"8288ea0f465821075506d667cdcd5f9d969b96f50c1e38faf87e7237c6580c3b"} Dec 05 11:52:48 crc kubenswrapper[5014]: I1205 11:52:48.537864 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rwdzw/crc-debug-l9x6w"] Dec 05 11:52:48 crc kubenswrapper[5014]: I1205 11:52:48.544810 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rwdzw/crc-debug-l9x6w"] Dec 05 11:52:49 crc kubenswrapper[5014]: I1205 11:52:49.600545 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:49 crc kubenswrapper[5014]: I1205 11:52:49.773991 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f6e87f45-a650-408a-8453-ca47feba3592-host\") pod \"f6e87f45-a650-408a-8453-ca47feba3592\" (UID: \"f6e87f45-a650-408a-8453-ca47feba3592\") " Dec 05 11:52:49 crc kubenswrapper[5014]: I1205 11:52:49.774090 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f6e87f45-a650-408a-8453-ca47feba3592-host" (OuterVolumeSpecName: "host") pod "f6e87f45-a650-408a-8453-ca47feba3592" (UID: "f6e87f45-a650-408a-8453-ca47feba3592"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 11:52:49 crc kubenswrapper[5014]: I1205 11:52:49.774195 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6ww2\" (UniqueName: \"kubernetes.io/projected/f6e87f45-a650-408a-8453-ca47feba3592-kube-api-access-m6ww2\") pod \"f6e87f45-a650-408a-8453-ca47feba3592\" (UID: \"f6e87f45-a650-408a-8453-ca47feba3592\") " Dec 05 11:52:49 crc kubenswrapper[5014]: I1205 11:52:49.774813 5014 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f6e87f45-a650-408a-8453-ca47feba3592-host\") on node \"crc\" DevicePath \"\"" Dec 05 11:52:49 crc kubenswrapper[5014]: I1205 11:52:49.789580 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6e87f45-a650-408a-8453-ca47feba3592-kube-api-access-m6ww2" (OuterVolumeSpecName: "kube-api-access-m6ww2") pod "f6e87f45-a650-408a-8453-ca47feba3592" (UID: "f6e87f45-a650-408a-8453-ca47feba3592"). InnerVolumeSpecName "kube-api-access-m6ww2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:52:49 crc kubenswrapper[5014]: I1205 11:52:49.876750 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6ww2\" (UniqueName: \"kubernetes.io/projected/f6e87f45-a650-408a-8453-ca47feba3592-kube-api-access-m6ww2\") on node \"crc\" DevicePath \"\"" Dec 05 11:52:50 crc kubenswrapper[5014]: I1205 11:52:50.318169 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:52:50 crc kubenswrapper[5014]: E1205 11:52:50.318961 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:52:50 crc kubenswrapper[5014]: I1205 11:52:50.518710 5014 scope.go:117] "RemoveContainer" containerID="c188a1d9a87d460e7e20a1b61d5955a2f19fe4119ebea95147bd96c0c13cdc8a" Dec 05 11:52:50 crc kubenswrapper[5014]: I1205 11:52:50.518834 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rwdzw/crc-debug-l9x6w" Dec 05 11:52:51 crc kubenswrapper[5014]: I1205 11:52:51.329882 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6e87f45-a650-408a-8453-ca47feba3592" path="/var/lib/kubelet/pods/f6e87f45-a650-408a-8453-ca47feba3592/volumes" Dec 05 11:53:03 crc kubenswrapper[5014]: I1205 11:53:03.324419 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:53:03 crc kubenswrapper[5014]: E1205 11:53:03.325129 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.044771 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c6dbf5d74-pbtjs_65c45e15-99d6-4c93-ae6e-67bd07e7eba9/barbican-api/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.133154 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c6dbf5d74-pbtjs_65c45e15-99d6-4c93-ae6e-67bd07e7eba9/barbican-api-log/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.270662 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-c76ffd784-m8mzt_9cfdc764-b85e-48e8-8a0e-0945c00f278f/barbican-keystone-listener/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.341830 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-c76ffd784-m8mzt_9cfdc764-b85e-48e8-8a0e-0945c00f278f/barbican-keystone-listener-log/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.485995 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-78b5c6757c-hdtxh_7a5e0260-dfe5-4f24-82bc-e172af4db809/barbican-worker/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.488692 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-78b5c6757c-hdtxh_7a5e0260-dfe5-4f24-82bc-e172af4db809/barbican-worker-log/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.772459 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr_4fc14e6b-fae1-4d4c-96f8-f5a86422a20a/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.816962 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4d4ebd00-2f01-406e-9763-e4e58f33f09d/ceilometer-central-agent/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.918998 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4d4ebd00-2f01-406e-9763-e4e58f33f09d/ceilometer-notification-agent/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.976604 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4d4ebd00-2f01-406e-9763-e4e58f33f09d/sg-core/0.log" Dec 05 11:53:05 crc kubenswrapper[5014]: I1205 11:53:05.996590 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4d4ebd00-2f01-406e-9763-e4e58f33f09d/proxy-httpd/0.log" Dec 05 
11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.180670 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_5e322d23-65da-40e8-b814-815c148aa523/cinder-api/0.log" Dec 05 11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.218844 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_5e322d23-65da-40e8-b814-815c148aa523/cinder-api-log/0.log" Dec 05 11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.318139 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_934811df-aabf-44df-8b73-4612a55d73a2/cinder-scheduler/0.log" Dec 05 11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.405755 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_934811df-aabf-44df-8b73-4612a55d73a2/probe/0.log" Dec 05 11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.511942 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-m9t55_209c8894-646c-40b1-a33f-3890d10b3e28/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.603337 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5_aa2bf2b4-c7fa-40e6-adee-d043c47760bc/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.705673 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-k5tgq_d8a409e0-f594-4164-950f-c1285bf165af/init/0.log" Dec 05 11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.869222 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-k5tgq_d8a409e0-f594-4164-950f-c1285bf165af/init/0.log" Dec 05 11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.911356 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-k5tgq_d8a409e0-f594-4164-950f-c1285bf165af/dnsmasq-dns/0.log" Dec 05 11:53:06 crc kubenswrapper[5014]: I1205 11:53:06.994974 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6_82e883e4-b7b9-463c-99e5-ac0a855a22cd/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:07 crc kubenswrapper[5014]: I1205 11:53:07.137319 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_c59756a0-84f2-4678-9294-aaa2475d08ec/glance-log/0.log" Dec 05 11:53:07 crc kubenswrapper[5014]: I1205 11:53:07.148607 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_c59756a0-84f2-4678-9294-aaa2475d08ec/glance-httpd/0.log" Dec 05 11:53:07 crc kubenswrapper[5014]: I1205 11:53:07.310170 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_927d96cb-db91-42ec-8963-4b1259c7b65f/glance-httpd/0.log" Dec 05 11:53:07 crc kubenswrapper[5014]: I1205 11:53:07.331240 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_927d96cb-db91-42ec-8963-4b1259c7b65f/glance-log/0.log" Dec 05 11:53:07 crc kubenswrapper[5014]: I1205 11:53:07.495434 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-575d445b9b-l7wlc_b5b07bd8-c674-4647-a09b-eae67ddad491/horizon/0.log" Dec 05 11:53:07 crc kubenswrapper[5014]: I1205 11:53:07.615786 5014 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn_f632ba62-c6d0-4229-9d26-cf78c7738723/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:07 crc kubenswrapper[5014]: I1205 11:53:07.827287 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-575d445b9b-l7wlc_b5b07bd8-c674-4647-a09b-eae67ddad491/horizon-log/0.log" Dec 05 11:53:07 crc kubenswrapper[5014]: I1205 11:53:07.946303 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-jns8m_6df89661-8d7a-4ea6-b3ca-4560ecc324f3/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:08 crc kubenswrapper[5014]: I1205 11:53:08.208778 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_732c79b1-258d-4426-9adf-3019d0935a81/kube-state-metrics/0.log" Dec 05 11:53:08 crc kubenswrapper[5014]: I1205 11:53:08.240330 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-c6c5974d5-l72zk_d9c4da24-4b94-4a9f-982f-9114df83cc67/keystone-api/0.log" Dec 05 11:53:08 crc kubenswrapper[5014]: I1205 11:53:08.417618 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h_fab44f82-d30a-4bb9-b416-5ff67a5f55b6/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:08 crc kubenswrapper[5014]: I1205 11:53:08.762868 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5d74b89875-gnlqf_91f750dc-c2ab-4b76-b659-4f5e11bf2e85/neutron-httpd/0.log" Dec 05 11:53:08 crc kubenswrapper[5014]: I1205 11:53:08.823012 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5d74b89875-gnlqf_91f750dc-c2ab-4b76-b659-4f5e11bf2e85/neutron-api/0.log" Dec 05 11:53:08 crc kubenswrapper[5014]: I1205 11:53:08.854703 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn_d0637356-0bbd-4cbb-a24b-88a27079fb82/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:09 crc kubenswrapper[5014]: I1205 11:53:09.409330 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_533479f0-4f9e-46b8-a2b8-b0eea26ae3bc/nova-cell0-conductor-conductor/0.log" Dec 05 11:53:09 crc kubenswrapper[5014]: I1205 11:53:09.441186 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_512551f9-cbaf-4245-9c35-68a0d6adc709/nova-api-log/0.log" Dec 05 11:53:09 crc kubenswrapper[5014]: I1205 11:53:09.689429 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_2d40af86-9a47-4de0-aa6f-a0ec696d2c23/nova-cell1-conductor-conductor/0.log" Dec 05 11:53:09 crc kubenswrapper[5014]: I1205 11:53:09.726445 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_512551f9-cbaf-4245-9c35-68a0d6adc709/nova-api-api/0.log" Dec 05 11:53:09 crc kubenswrapper[5014]: I1205 11:53:09.767697 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_0d4f49a8-b03d-40a0-b688-1e47556fe7b0/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 11:53:09 crc kubenswrapper[5014]: I1205 11:53:09.951621 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-hw7cx_1ef403f3-902c-41ac-874b-25627e6b5637/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:10 crc 
kubenswrapper[5014]: I1205 11:53:10.109089 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c0e65cd2-d320-4d94-8ea2-034e56ba5880/nova-metadata-log/0.log" Dec 05 11:53:10 crc kubenswrapper[5014]: I1205 11:53:10.326675 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_4b1e7313-1a79-42e6-b286-0046ddd16e69/nova-scheduler-scheduler/0.log" Dec 05 11:53:10 crc kubenswrapper[5014]: I1205 11:53:10.368478 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_021926d7-f03a-4b1b-bcf3-bdd000b17a1e/mysql-bootstrap/0.log" Dec 05 11:53:10 crc kubenswrapper[5014]: I1205 11:53:10.619511 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_021926d7-f03a-4b1b-bcf3-bdd000b17a1e/mysql-bootstrap/0.log" Dec 05 11:53:10 crc kubenswrapper[5014]: I1205 11:53:10.659217 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_021926d7-f03a-4b1b-bcf3-bdd000b17a1e/galera/0.log" Dec 05 11:53:10 crc kubenswrapper[5014]: I1205 11:53:10.824650 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa/mysql-bootstrap/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.021745 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa/mysql-bootstrap/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.039438 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa/galera/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.278806 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56/openstackclient/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.324207 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c0e65cd2-d320-4d94-8ea2-034e56ba5880/nova-metadata-metadata/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.362641 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-6wk9t_2b673e96-d37f-49d8-b3f2-c72cd66ab6db/ovn-controller/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.555190 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ff9qh_4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad/openstack-network-exporter/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.605587 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mfsjr_abc53eaa-a216-4ea8-a223-4e2c79562edb/ovsdb-server-init/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.771456 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mfsjr_abc53eaa-a216-4ea8-a223-4e2c79562edb/ovs-vswitchd/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.832832 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mfsjr_abc53eaa-a216-4ea8-a223-4e2c79562edb/ovsdb-server/0.log" Dec 05 11:53:11 crc kubenswrapper[5014]: I1205 11:53:11.835946 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mfsjr_abc53eaa-a216-4ea8-a223-4e2c79562edb/ovsdb-server-init/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.026372 5014 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-dsjzk_1d432303-8ec7-44e2-8a87-d5e5c8c59979/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.038877 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_55bc4dc6-b48b-4963-9004-7614f65bac44/openstack-network-exporter/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.124376 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_55bc4dc6-b48b-4963-9004-7614f65bac44/ovn-northd/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.321935 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_af56d79f-8f8a-4710-96a9-7995c0a30467/openstack-network-exporter/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.348731 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_af56d79f-8f8a-4710-96a9-7995c0a30467/ovsdbserver-nb/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.552836 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b28650ad-9ebf-471c-91c9-3adef7f85d9f/openstack-network-exporter/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.554183 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b28650ad-9ebf-471c-91c9-3adef7f85d9f/ovsdbserver-sb/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.678658 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-75c6d4746d-f9vpc_81877d92-8552-4149-a92a-9a9bdfc431b4/placement-api/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.858759 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-75c6d4746d-f9vpc_81877d92-8552-4149-a92a-9a9bdfc431b4/placement-log/0.log" Dec 05 11:53:12 crc kubenswrapper[5014]: I1205 11:53:12.910165 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b16a0ee0-c10b-41b2-a636-4b066b470df6/setup-container/0.log" Dec 05 11:53:13 crc kubenswrapper[5014]: I1205 11:53:13.145005 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b16a0ee0-c10b-41b2-a636-4b066b470df6/rabbitmq/0.log" Dec 05 11:53:13 crc kubenswrapper[5014]: I1205 11:53:13.243315 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b16a0ee0-c10b-41b2-a636-4b066b470df6/setup-container/0.log" Dec 05 11:53:13 crc kubenswrapper[5014]: I1205 11:53:13.302798 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_46830cc1-2cdb-48ad-86a0-159b73d805c3/setup-container/0.log" Dec 05 11:53:13 crc kubenswrapper[5014]: I1205 11:53:13.430355 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_46830cc1-2cdb-48ad-86a0-159b73d805c3/setup-container/0.log" Dec 05 11:53:13 crc kubenswrapper[5014]: I1205 11:53:13.493480 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns_9b908cdd-21cf-4f71-8bc7-83db13979563/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:13 crc kubenswrapper[5014]: I1205 11:53:13.550617 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_46830cc1-2cdb-48ad-86a0-159b73d805c3/rabbitmq/0.log" Dec 05 11:53:13 crc 
kubenswrapper[5014]: I1205 11:53:13.817394 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-zm7rv_6eac20dd-3e47-46e2-91fd-c684094b8d74/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:13 crc kubenswrapper[5014]: I1205 11:53:13.843091 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9_6a880be3-7a1f-4e62-9603-9469947923ce/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.075311 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-p24n8_11921594-1098-41c9-8744-7801330f646c/ssh-known-hosts-edpm-deployment/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.078106 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-bfwkr_560529fa-7baf-4bce-b55b-3816b5c7928c/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.334844 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-848c5c7c55-ctrjl_36878e89-1c1c-4054-b9a5-159e056f95f4/proxy-server/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.465150 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-848c5c7c55-ctrjl_36878e89-1c1c-4054-b9a5-159e056f95f4/proxy-httpd/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.482265 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-nxnqm_3d39e279-9315-4b5e-af14-ea88aef45b00/swift-ring-rebalance/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.698936 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/account-auditor/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.709502 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/account-reaper/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.720558 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/account-replicator/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.858368 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/account-server/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.936295 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/container-auditor/0.log" Dec 05 11:53:14 crc kubenswrapper[5014]: I1205 11:53:14.949042 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/container-replicator/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.005465 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/container-server/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.123561 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/container-updater/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.213358 5014 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-auditor/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.241401 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-expirer/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.253785 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-replicator/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.347166 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-server/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.426378 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-updater/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.457964 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/rsync/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.514895 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/swift-recon-cron/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.758985 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_3f886993-57e9-4023-8186-8fbdeb4fe04c/tempest-tests-tempest-tests-runner/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.769263 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6_68801bb8-5aae-4367-9c85-a1c139ab1844/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.930244 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_9593349e-2888-4068-9ec8-9b7c4a154a9e/test-operator-logs-container/0.log" Dec 05 11:53:15 crc kubenswrapper[5014]: I1205 11:53:15.986730 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-jv96g_69a4f49e-1b6f-4085-81da-69b0e099b769/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 11:53:17 crc kubenswrapper[5014]: I1205 11:53:17.320941 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:53:17 crc kubenswrapper[5014]: E1205 11:53:17.321493 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:53:23 crc kubenswrapper[5014]: I1205 11:53:23.676648 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_ec058eab-d721-4033-b346-bddf43d1de29/memcached/0.log" Dec 05 11:53:32 crc kubenswrapper[5014]: I1205 11:53:32.319104 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:53:32 crc kubenswrapper[5014]: E1205 
11:53:32.320199 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.542350 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cr7x9"] Dec 05 11:53:41 crc kubenswrapper[5014]: E1205 11:53:41.543499 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e87f45-a650-408a-8453-ca47feba3592" containerName="container-00" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.543521 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e87f45-a650-408a-8453-ca47feba3592" containerName="container-00" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.543800 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e87f45-a650-408a-8453-ca47feba3592" containerName="container-00" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.545983 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.553357 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cr7x9"] Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.647083 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgxmv\" (UniqueName: \"kubernetes.io/projected/93cb7817-e1f4-42b3-bb15-79d7da27eb01-kube-api-access-rgxmv\") pod \"redhat-operators-cr7x9\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.647511 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-catalog-content\") pod \"redhat-operators-cr7x9\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.647773 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-utilities\") pod \"redhat-operators-cr7x9\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.749257 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-utilities\") pod \"redhat-operators-cr7x9\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.749334 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgxmv\" (UniqueName: \"kubernetes.io/projected/93cb7817-e1f4-42b3-bb15-79d7da27eb01-kube-api-access-rgxmv\") pod \"redhat-operators-cr7x9\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " 
pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.749360 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-catalog-content\") pod \"redhat-operators-cr7x9\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.749984 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-catalog-content\") pod \"redhat-operators-cr7x9\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.750287 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-utilities\") pod \"redhat-operators-cr7x9\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.875487 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgxmv\" (UniqueName: \"kubernetes.io/projected/93cb7817-e1f4-42b3-bb15-79d7da27eb01-kube-api-access-rgxmv\") pod \"redhat-operators-cr7x9\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:41 crc kubenswrapper[5014]: I1205 11:53:41.886284 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:42 crc kubenswrapper[5014]: I1205 11:53:42.411784 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cr7x9"] Dec 05 11:53:43 crc kubenswrapper[5014]: I1205 11:53:43.018487 5014 generic.go:334] "Generic (PLEG): container finished" podID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerID="73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90" exitCode=0 Dec 05 11:53:43 crc kubenswrapper[5014]: I1205 11:53:43.018616 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cr7x9" event={"ID":"93cb7817-e1f4-42b3-bb15-79d7da27eb01","Type":"ContainerDied","Data":"73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90"} Dec 05 11:53:43 crc kubenswrapper[5014]: I1205 11:53:43.018821 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cr7x9" event={"ID":"93cb7817-e1f4-42b3-bb15-79d7da27eb01","Type":"ContainerStarted","Data":"3b9329c1fdb7738ac17df3efa2bc5273da3d381c59f68d55a43db1b74d674fd5"} Dec 05 11:53:43 crc kubenswrapper[5014]: I1205 11:53:43.334031 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:53:43 crc kubenswrapper[5014]: E1205 11:53:43.334664 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:53:43 crc 
kubenswrapper[5014]: I1205 11:53:43.943130 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/util/0.log" Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.028895 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cr7x9" event={"ID":"93cb7817-e1f4-42b3-bb15-79d7da27eb01","Type":"ContainerStarted","Data":"e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85"} Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.257466 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/util/0.log" Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.264280 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/pull/0.log" Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.269749 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/pull/0.log" Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.606709 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/pull/0.log" Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.667459 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/extract/0.log" Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.703962 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/util/0.log" Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.704513 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-txc7h_fce514d3-328b-4d3f-b863-8fbb70bac467/kube-rbac-proxy/0.log" Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.863199 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-txc7h_fce514d3-328b-4d3f-b863-8fbb70bac467/manager/0.log" Dec 05 11:53:44 crc kubenswrapper[5014]: I1205 11:53:44.985260 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-dtmzt_26989151-2ab4-4ae1-9d53-f9c038fba7e1/kube-rbac-proxy/0.log" Dec 05 11:53:45 crc kubenswrapper[5014]: I1205 11:53:45.016819 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-dtmzt_26989151-2ab4-4ae1-9d53-f9c038fba7e1/manager/0.log" Dec 05 11:53:45 crc kubenswrapper[5014]: I1205 11:53:45.204725 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-pdzgg_90daaa58-8638-46b7-9492-27f70cc124a8/manager/0.log" Dec 05 11:53:45 crc kubenswrapper[5014]: I1205 11:53:45.211264 5014 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-pdzgg_90daaa58-8638-46b7-9492-27f70cc124a8/kube-rbac-proxy/0.log" Dec 05 11:53:45 crc kubenswrapper[5014]: I1205 11:53:45.416110 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-xvrmp_4e5afc7a-459a-4a76-bf92-fd47a823833e/kube-rbac-proxy/0.log" Dec 05 11:53:45 crc kubenswrapper[5014]: I1205 11:53:45.563917 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-xvrmp_4e5afc7a-459a-4a76-bf92-fd47a823833e/manager/0.log" Dec 05 11:53:45 crc kubenswrapper[5014]: I1205 11:53:45.566515 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-422qp_2720df25-8eec-42e6-8c03-8b9d18314712/kube-rbac-proxy/0.log" Dec 05 11:53:45 crc kubenswrapper[5014]: I1205 11:53:45.645054 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-422qp_2720df25-8eec-42e6-8c03-8b9d18314712/manager/0.log" Dec 05 11:53:45 crc kubenswrapper[5014]: I1205 11:53:45.730136 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-pdg4w_aaf5b26b-5eaf-4143-b78f-69f8c976c10a/kube-rbac-proxy/0.log" Dec 05 11:53:45 crc kubenswrapper[5014]: I1205 11:53:45.826743 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-pdg4w_aaf5b26b-5eaf-4143-b78f-69f8c976c10a/manager/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.018225 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-lsnnj_344c7e6d-3b0d-4874-b9f1-40b7ae307199/kube-rbac-proxy/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.052548 5014 generic.go:334] "Generic (PLEG): container finished" podID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerID="e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85" exitCode=0 Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.052596 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cr7x9" event={"ID":"93cb7817-e1f4-42b3-bb15-79d7da27eb01","Type":"ContainerDied","Data":"e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85"} Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.077006 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-lsnnj_344c7e6d-3b0d-4874-b9f1-40b7ae307199/manager/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: E1205 11:53:46.083175 5014 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93cb7817_e1f4_42b3_bb15_79d7da27eb01.slice/crio-conmon-e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85.scope\": RecentStats: unable to find data in memory cache]" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.165696 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-n7nfr_17fd6d59-b4b9-4dea-b697-3998c5d10976/kube-rbac-proxy/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.269013 5014 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-n7nfr_17fd6d59-b4b9-4dea-b697-3998c5d10976/manager/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.290517 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b9d89_02a9a463-6c8c-4771-b583-6ea38f60b446/kube-rbac-proxy/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.403814 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b9d89_02a9a463-6c8c-4771-b583-6ea38f60b446/manager/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.556177 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-tvfjq_18a639b9-d602-4c6d-8c71-28611cbd65bf/kube-rbac-proxy/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.613795 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-tvfjq_18a639b9-d602-4c6d-8c71-28611cbd65bf/manager/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.973363 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-h9j4b_2779b764-e7b5-448c-b189-9e450b7123cb/kube-rbac-proxy/0.log" Dec 05 11:53:46 crc kubenswrapper[5014]: I1205 11:53:46.988423 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-h9j4b_2779b764-e7b5-448c-b189-9e450b7123cb/manager/0.log" Dec 05 11:53:47 crc kubenswrapper[5014]: I1205 11:53:47.116935 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-kctmp_7a3504d5-c870-42a1-8cb4-cceed657effe/kube-rbac-proxy/0.log" Dec 05 11:53:47 crc kubenswrapper[5014]: I1205 11:53:47.230563 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-kctmp_7a3504d5-c870-42a1-8cb4-cceed657effe/manager/0.log" Dec 05 11:53:47 crc kubenswrapper[5014]: I1205 11:53:47.387946 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-t8lzs_355e95da-4f3d-4dce-b35e-79162bedce09/manager/0.log" Dec 05 11:53:47 crc kubenswrapper[5014]: I1205 11:53:47.430802 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-t8lzs_355e95da-4f3d-4dce-b35e-79162bedce09/kube-rbac-proxy/0.log" Dec 05 11:53:47 crc kubenswrapper[5014]: I1205 11:53:47.734633 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-v2hdr_5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37/kube-rbac-proxy/0.log" Dec 05 11:53:47 crc kubenswrapper[5014]: I1205 11:53:47.869411 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-v2hdr_5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37/manager/0.log" Dec 05 11:53:47 crc kubenswrapper[5014]: I1205 11:53:47.892938 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd_2fba4b2b-28c2-41b6-86a8-7bb26b432f71/kube-rbac-proxy/0.log" Dec 05 11:53:48 crc kubenswrapper[5014]: I1205 11:53:48.039780 5014 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd_2fba4b2b-28c2-41b6-86a8-7bb26b432f71/manager/0.log" Dec 05 11:53:48 crc kubenswrapper[5014]: I1205 11:53:48.076233 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cr7x9" event={"ID":"93cb7817-e1f4-42b3-bb15-79d7da27eb01","Type":"ContainerStarted","Data":"38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8"} Dec 05 11:53:48 crc kubenswrapper[5014]: I1205 11:53:48.096085 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cr7x9" podStartSLOduration=3.107450428 podStartE2EDuration="7.096068093s" podCreationTimestamp="2025-12-05 11:53:41 +0000 UTC" firstStartedPulling="2025-12-05 11:53:43.020201029 +0000 UTC m=+3949.968318723" lastFinishedPulling="2025-12-05 11:53:47.008818684 +0000 UTC m=+3953.956936388" observedRunningTime="2025-12-05 11:53:48.090954779 +0000 UTC m=+3955.039072503" watchObservedRunningTime="2025-12-05 11:53:48.096068093 +0000 UTC m=+3955.044185797" Dec 05 11:53:48 crc kubenswrapper[5014]: I1205 11:53:48.363110 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-58785945fc-gt9n2_bdd8a367-e716-47ca-99d2-4b9fe9af1f6e/operator/0.log" Dec 05 11:53:48 crc kubenswrapper[5014]: I1205 11:53:48.377119 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-snr6z_f1566577-7102-49a6-a5b8-d27f4b03e350/registry-server/0.log" Dec 05 11:53:48 crc kubenswrapper[5014]: I1205 11:53:48.790812 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-svxkx_e3bb4ae5-8495-40c1-9a07-affdc714ebe0/kube-rbac-proxy/0.log" Dec 05 11:53:48 crc kubenswrapper[5014]: I1205 11:53:48.931306 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-svxkx_e3bb4ae5-8495-40c1-9a07-affdc714ebe0/manager/0.log" Dec 05 11:53:49 crc kubenswrapper[5014]: I1205 11:53:49.044825 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-dbxkc_eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47/kube-rbac-proxy/0.log" Dec 05 11:53:49 crc kubenswrapper[5014]: I1205 11:53:49.112716 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-69b6fcdff-tzs9c_ddc5d07f-9748-41de-82c4-cf52f02063ac/manager/0.log" Dec 05 11:53:49 crc kubenswrapper[5014]: I1205 11:53:49.171792 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-dbxkc_eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47/manager/0.log" Dec 05 11:53:49 crc kubenswrapper[5014]: I1205 11:53:49.248637 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-hppvs_fce76d80-94e7-4c38-93c0-044691915f03/operator/0.log" Dec 05 11:53:49 crc kubenswrapper[5014]: I1205 11:53:49.351514 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-l9gtg_831cc4a4-0997-4669-8c6d-9dbd8eaea14e/kube-rbac-proxy/0.log" Dec 05 11:53:49 crc kubenswrapper[5014]: I1205 11:53:49.894114 5014 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-l9gtg_831cc4a4-0997-4669-8c6d-9dbd8eaea14e/manager/0.log" Dec 05 11:53:49 crc kubenswrapper[5014]: I1205 11:53:49.910582 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-48xdk_444b1e62-4d81-4e12-8110-9b5f680b3336/kube-rbac-proxy/0.log" Dec 05 11:53:50 crc kubenswrapper[5014]: I1205 11:53:50.007182 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-48xdk_444b1e62-4d81-4e12-8110-9b5f680b3336/manager/0.log" Dec 05 11:53:50 crc kubenswrapper[5014]: I1205 11:53:50.108856 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-fn75n_6cb20401-6c79-43c4-a649-c1df07de148a/manager/0.log" Dec 05 11:53:50 crc kubenswrapper[5014]: I1205 11:53:50.164562 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-fn75n_6cb20401-6c79-43c4-a649-c1df07de148a/kube-rbac-proxy/0.log" Dec 05 11:53:50 crc kubenswrapper[5014]: I1205 11:53:50.268423 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-brstb_3948de6a-fa93-4223-bda4-73afc54cc63c/kube-rbac-proxy/0.log" Dec 05 11:53:50 crc kubenswrapper[5014]: I1205 11:53:50.302839 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-brstb_3948de6a-fa93-4223-bda4-73afc54cc63c/manager/0.log" Dec 05 11:53:51 crc kubenswrapper[5014]: I1205 11:53:51.887666 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:51 crc kubenswrapper[5014]: I1205 11:53:51.888004 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:53:52 crc kubenswrapper[5014]: I1205 11:53:52.956733 5014 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cr7x9" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerName="registry-server" probeResult="failure" output=< Dec 05 11:53:52 crc kubenswrapper[5014]: timeout: failed to connect service ":50051" within 1s Dec 05 11:53:52 crc kubenswrapper[5014]: > Dec 05 11:53:54 crc kubenswrapper[5014]: I1205 11:53:54.325653 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:53:54 crc kubenswrapper[5014]: E1205 11:53:54.331429 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:54:01 crc kubenswrapper[5014]: I1205 11:54:01.931216 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:54:01 crc kubenswrapper[5014]: I1205 11:54:01.990664 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:54:02 crc kubenswrapper[5014]: 
I1205 11:54:02.170440 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cr7x9"] Dec 05 11:54:03 crc kubenswrapper[5014]: I1205 11:54:03.215193 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cr7x9" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerName="registry-server" containerID="cri-o://38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8" gracePeriod=2 Dec 05 11:54:03 crc kubenswrapper[5014]: I1205 11:54:03.915783 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:54:03 crc kubenswrapper[5014]: I1205 11:54:03.975208 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-catalog-content\") pod \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " Dec 05 11:54:03 crc kubenswrapper[5014]: I1205 11:54:03.975363 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgxmv\" (UniqueName: \"kubernetes.io/projected/93cb7817-e1f4-42b3-bb15-79d7da27eb01-kube-api-access-rgxmv\") pod \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " Dec 05 11:54:03 crc kubenswrapper[5014]: I1205 11:54:03.975388 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-utilities\") pod \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\" (UID: \"93cb7817-e1f4-42b3-bb15-79d7da27eb01\") " Dec 05 11:54:03 crc kubenswrapper[5014]: I1205 11:54:03.976201 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-utilities" (OuterVolumeSpecName: "utilities") pod "93cb7817-e1f4-42b3-bb15-79d7da27eb01" (UID: "93cb7817-e1f4-42b3-bb15-79d7da27eb01"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:54:03 crc kubenswrapper[5014]: I1205 11:54:03.982580 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93cb7817-e1f4-42b3-bb15-79d7da27eb01-kube-api-access-rgxmv" (OuterVolumeSpecName: "kube-api-access-rgxmv") pod "93cb7817-e1f4-42b3-bb15-79d7da27eb01" (UID: "93cb7817-e1f4-42b3-bb15-79d7da27eb01"). InnerVolumeSpecName "kube-api-access-rgxmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.077909 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgxmv\" (UniqueName: \"kubernetes.io/projected/93cb7817-e1f4-42b3-bb15-79d7da27eb01-kube-api-access-rgxmv\") on node \"crc\" DevicePath \"\"" Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.077945 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.094320 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "93cb7817-e1f4-42b3-bb15-79d7da27eb01" (UID: "93cb7817-e1f4-42b3-bb15-79d7da27eb01"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.179341 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93cb7817-e1f4-42b3-bb15-79d7da27eb01-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.226018 5014 generic.go:334] "Generic (PLEG): container finished" podID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerID="38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8" exitCode=0 Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.226078 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cr7x9" event={"ID":"93cb7817-e1f4-42b3-bb15-79d7da27eb01","Type":"ContainerDied","Data":"38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8"} Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.226117 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cr7x9" Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.226141 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cr7x9" event={"ID":"93cb7817-e1f4-42b3-bb15-79d7da27eb01","Type":"ContainerDied","Data":"3b9329c1fdb7738ac17df3efa2bc5273da3d381c59f68d55a43db1b74d674fd5"} Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.226167 5014 scope.go:117] "RemoveContainer" containerID="38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8" Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.266735 5014 scope.go:117] "RemoveContainer" containerID="e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85" Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.268242 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cr7x9"] Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.277403 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cr7x9"] Dec 05 11:54:04 crc kubenswrapper[5014]: I1205 11:54:04.995608 5014 scope.go:117] "RemoveContainer" containerID="73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90" Dec 05 11:54:05 crc kubenswrapper[5014]: I1205 11:54:05.043064 5014 scope.go:117] "RemoveContainer" containerID="38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8" Dec 05 11:54:05 crc kubenswrapper[5014]: E1205 11:54:05.043549 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8\": container with ID starting with 38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8 not found: ID does not exist" containerID="38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8" Dec 05 11:54:05 crc kubenswrapper[5014]: I1205 11:54:05.043580 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8"} err="failed to get container status \"38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8\": rpc error: code = NotFound desc = could not find container \"38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8\": container with ID starting with 38049962db0a18f8f9e5eeb54349f5ebbffc70e24b1f5257117f98a2bab393b8 not found: ID 
does not exist" Dec 05 11:54:05 crc kubenswrapper[5014]: I1205 11:54:05.043600 5014 scope.go:117] "RemoveContainer" containerID="e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85" Dec 05 11:54:05 crc kubenswrapper[5014]: E1205 11:54:05.044082 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85\": container with ID starting with e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85 not found: ID does not exist" containerID="e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85" Dec 05 11:54:05 crc kubenswrapper[5014]: I1205 11:54:05.044116 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85"} err="failed to get container status \"e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85\": rpc error: code = NotFound desc = could not find container \"e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85\": container with ID starting with e518d30eb35fbbe4076cc3542578be401405c394abe8f7605d5f49bc20d3df85 not found: ID does not exist" Dec 05 11:54:05 crc kubenswrapper[5014]: I1205 11:54:05.044136 5014 scope.go:117] "RemoveContainer" containerID="73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90" Dec 05 11:54:05 crc kubenswrapper[5014]: E1205 11:54:05.044549 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90\": container with ID starting with 73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90 not found: ID does not exist" containerID="73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90" Dec 05 11:54:05 crc kubenswrapper[5014]: I1205 11:54:05.044579 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90"} err="failed to get container status \"73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90\": rpc error: code = NotFound desc = could not find container \"73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90\": container with ID starting with 73dd1cb9addee5e287a4f519dd921c0baf5f8794e1c8744dc01f408553ab1b90 not found: ID does not exist" Dec 05 11:54:05 crc kubenswrapper[5014]: I1205 11:54:05.328244 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" path="/var/lib/kubelet/pods/93cb7817-e1f4-42b3-bb15-79d7da27eb01/volumes" Dec 05 11:54:07 crc kubenswrapper[5014]: I1205 11:54:07.318794 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:54:07 crc kubenswrapper[5014]: E1205 11:54:07.319674 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:54:12 crc kubenswrapper[5014]: I1205 11:54:12.252850 5014 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-tsztt_9357b561-29c1-4fb1-9004-8bf8378aad02/control-plane-machine-set-operator/0.log" Dec 05 11:54:12 crc kubenswrapper[5014]: I1205 11:54:12.374878 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-b2znf_ad71de77-0b33-48ff-86d1-87235f83b4bf/kube-rbac-proxy/0.log" Dec 05 11:54:12 crc kubenswrapper[5014]: I1205 11:54:12.430179 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-b2znf_ad71de77-0b33-48ff-86d1-87235f83b4bf/machine-api-operator/0.log" Dec 05 11:54:18 crc kubenswrapper[5014]: I1205 11:54:18.319849 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:54:18 crc kubenswrapper[5014]: E1205 11:54:18.320768 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:54:24 crc kubenswrapper[5014]: I1205 11:54:24.791798 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-kk69q_08898b10-ad0c-4b34-bc40-49a86e6da919/cert-manager-controller/0.log" Dec 05 11:54:25 crc kubenswrapper[5014]: I1205 11:54:25.015887 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-vjh5c_ef8d817f-5b79-4efc-aec5-cf9f4133b0e2/cert-manager-cainjector/0.log" Dec 05 11:54:25 crc kubenswrapper[5014]: I1205 11:54:25.126479 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-qhbsj_43ed4c76-e0f2-4016-8e33-ab3498c5268c/cert-manager-webhook/0.log" Dec 05 11:54:30 crc kubenswrapper[5014]: I1205 11:54:30.318725 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:54:30 crc kubenswrapper[5014]: E1205 11:54:30.319902 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:54:37 crc kubenswrapper[5014]: I1205 11:54:37.434530 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-fzxsv_043084bc-abeb-4cb7-bea1-7dae70ac655d/nmstate-console-plugin/0.log" Dec 05 11:54:37 crc kubenswrapper[5014]: I1205 11:54:37.608468 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-44tzw_a9df2f02-fdb6-46dc-bd30-25b7b4a2d357/nmstate-handler/0.log" Dec 05 11:54:37 crc kubenswrapper[5014]: I1205 11:54:37.649134 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qkl7q_47da56e6-6794-48dc-a7e6-99e6b63ecf43/nmstate-metrics/0.log" Dec 05 11:54:37 crc kubenswrapper[5014]: I1205 11:54:37.649659 5014 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qkl7q_47da56e6-6794-48dc-a7e6-99e6b63ecf43/kube-rbac-proxy/0.log" Dec 05 11:54:37 crc kubenswrapper[5014]: I1205 11:54:37.851888 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-vvdv5_8d7613b6-e062-416c-87e5-428a84a9d24f/nmstate-operator/0.log" Dec 05 11:54:37 crc kubenswrapper[5014]: I1205 11:54:37.916409 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-rnnzf_f73b5791-5c0a-4c9f-a78f-9ed2615f4538/nmstate-webhook/0.log" Dec 05 11:54:41 crc kubenswrapper[5014]: I1205 11:54:41.318885 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:54:41 crc kubenswrapper[5014]: E1205 11:54:41.319433 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.266177 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-26kt6_6fd388fd-a96d-4997-b3b7-9fef3d7130b7/kube-rbac-proxy/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.438938 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-26kt6_6fd388fd-a96d-4997-b3b7-9fef3d7130b7/controller/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.452312 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-frr-files/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.639935 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-frr-files/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.640329 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-metrics/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.651288 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-reloader/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.668164 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-reloader/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.818929 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-reloader/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.830737 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-metrics/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.844386 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-frr-files/0.log" Dec 05 11:54:52 crc kubenswrapper[5014]: I1205 11:54:52.875294 5014 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-metrics/0.log" Dec 05 11:54:53 crc kubenswrapper[5014]: I1205 11:54:53.045994 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-reloader/0.log" Dec 05 11:54:53 crc kubenswrapper[5014]: I1205 11:54:53.048930 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-metrics/0.log" Dec 05 11:54:53 crc kubenswrapper[5014]: I1205 11:54:53.062259 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-frr-files/0.log" Dec 05 11:54:53 crc kubenswrapper[5014]: I1205 11:54:53.109053 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/controller/0.log" Dec 05 11:54:53 crc kubenswrapper[5014]: I1205 11:54:53.284564 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/frr-metrics/0.log" Dec 05 11:54:53 crc kubenswrapper[5014]: I1205 11:54:53.309891 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/kube-rbac-proxy-frr/0.log" Dec 05 11:54:53 crc kubenswrapper[5014]: I1205 11:54:53.363767 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/kube-rbac-proxy/0.log" Dec 05 11:54:53 crc kubenswrapper[5014]: I1205 11:54:53.984037 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/reloader/0.log" Dec 05 11:54:54 crc kubenswrapper[5014]: I1205 11:54:54.038394 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-8xg84_cd82d1e5-3ac0-4669-a192-3b8bbf071ad5/frr-k8s-webhook-server/0.log" Dec 05 11:54:54 crc kubenswrapper[5014]: I1205 11:54:54.269940 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-54df4ff95d-j68c8_2a78d9b2-16fa-4586-86cf-96397edefe00/manager/0.log" Dec 05 11:54:54 crc kubenswrapper[5014]: I1205 11:54:54.377966 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/frr/0.log" Dec 05 11:54:54 crc kubenswrapper[5014]: I1205 11:54:54.512865 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5f44fbc487-54m6r_bb726700-5715-4a97-92c4-f8a50a0922bb/webhook-server/0.log" Dec 05 11:54:54 crc kubenswrapper[5014]: I1205 11:54:54.535295 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-qr2zx_0cc6b871-45be-4887-a73b-a2fe99989d41/kube-rbac-proxy/0.log" Dec 05 11:54:54 crc kubenswrapper[5014]: I1205 11:54:54.943153 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-qr2zx_0cc6b871-45be-4887-a73b-a2fe99989d41/speaker/0.log" Dec 05 11:54:55 crc kubenswrapper[5014]: I1205 11:54:55.318506 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:54:55 crc kubenswrapper[5014]: E1205 11:54:55.319123 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:55:06 crc kubenswrapper[5014]: I1205 11:55:06.535495 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/util/0.log" Dec 05 11:55:06 crc kubenswrapper[5014]: I1205 11:55:06.718258 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/util/0.log" Dec 05 11:55:06 crc kubenswrapper[5014]: I1205 11:55:06.768126 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/pull/0.log" Dec 05 11:55:06 crc kubenswrapper[5014]: I1205 11:55:06.771524 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/pull/0.log" Dec 05 11:55:06 crc kubenswrapper[5014]: I1205 11:55:06.949720 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/pull/0.log" Dec 05 11:55:06 crc kubenswrapper[5014]: I1205 11:55:06.955002 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/util/0.log" Dec 05 11:55:06 crc kubenswrapper[5014]: I1205 11:55:06.984476 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/extract/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.129521 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/util/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.277214 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/util/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.301481 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/pull/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.309738 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/pull/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.515058 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/pull/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.520960 5014 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/util/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.585028 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/extract/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.681506 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-utilities/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.858127 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-content/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.901957 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-utilities/0.log" Dec 05 11:55:07 crc kubenswrapper[5014]: I1205 11:55:07.908484 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-content/0.log" Dec 05 11:55:08 crc kubenswrapper[5014]: I1205 11:55:08.067311 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-content/0.log" Dec 05 11:55:08 crc kubenswrapper[5014]: I1205 11:55:08.081997 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-utilities/0.log" Dec 05 11:55:08 crc kubenswrapper[5014]: I1205 11:55:08.303185 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-utilities/0.log" Dec 05 11:55:08 crc kubenswrapper[5014]: I1205 11:55:08.512045 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-content/0.log" Dec 05 11:55:08 crc kubenswrapper[5014]: I1205 11:55:08.524918 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-utilities/0.log" Dec 05 11:55:08 crc kubenswrapper[5014]: I1205 11:55:08.572826 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-content/0.log" Dec 05 11:55:08 crc kubenswrapper[5014]: I1205 11:55:08.786017 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-utilities/0.log" Dec 05 11:55:08 crc kubenswrapper[5014]: I1205 11:55:08.827678 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-content/0.log" Dec 05 11:55:08 crc kubenswrapper[5014]: I1205 11:55:08.845035 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/registry-server/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 
11:55:09.048616 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-blg9z_6324df91-5676-4d76-969c-ed24a6f6d7bf/marketplace-operator/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 11:55:09.298295 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-utilities/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 11:55:09.465658 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/registry-server/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 11:55:09.490460 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-content/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 11:55:09.552949 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-content/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 11:55:09.560340 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-utilities/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 11:55:09.715814 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-utilities/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 11:55:09.745960 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-content/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 11:55:09.812348 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/registry-server/0.log" Dec 05 11:55:09 crc kubenswrapper[5014]: I1205 11:55:09.920407 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-utilities/0.log" Dec 05 11:55:10 crc kubenswrapper[5014]: I1205 11:55:10.117010 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-content/0.log" Dec 05 11:55:10 crc kubenswrapper[5014]: I1205 11:55:10.129341 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-utilities/0.log" Dec 05 11:55:10 crc kubenswrapper[5014]: I1205 11:55:10.131201 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-content/0.log" Dec 05 11:55:10 crc kubenswrapper[5014]: I1205 11:55:10.319005 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:55:10 crc kubenswrapper[5014]: E1205 11:55:10.319350 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:55:10 crc kubenswrapper[5014]: I1205 11:55:10.329233 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-utilities/0.log" Dec 05 11:55:10 crc kubenswrapper[5014]: I1205 11:55:10.358192 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-content/0.log" Dec 05 11:55:10 crc kubenswrapper[5014]: I1205 11:55:10.844611 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/registry-server/0.log" Dec 05 11:55:24 crc kubenswrapper[5014]: I1205 11:55:24.318388 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:55:24 crc kubenswrapper[5014]: E1205 11:55:24.319238 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:55:35 crc kubenswrapper[5014]: I1205 11:55:35.319396 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:55:35 crc kubenswrapper[5014]: E1205 11:55:35.322244 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:55:47 crc kubenswrapper[5014]: I1205 11:55:47.318546 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:55:47 crc kubenswrapper[5014]: E1205 11:55:47.319359 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:56:00 crc kubenswrapper[5014]: I1205 11:56:00.317999 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:56:00 crc kubenswrapper[5014]: E1205 11:56:00.318822 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.617183 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k9n99"] Dec 05 11:56:03 crc kubenswrapper[5014]: E1205 11:56:03.618127 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerName="extract-utilities" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.618151 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerName="extract-utilities" Dec 05 11:56:03 crc kubenswrapper[5014]: E1205 11:56:03.618183 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerName="registry-server" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.618194 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerName="registry-server" Dec 05 11:56:03 crc kubenswrapper[5014]: E1205 11:56:03.618226 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerName="extract-content" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.618239 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerName="extract-content" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.618559 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="93cb7817-e1f4-42b3-bb15-79d7da27eb01" containerName="registry-server" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.620477 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.627801 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9n99"] Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.730046 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-catalog-content\") pod \"redhat-marketplace-k9n99\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.730099 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-utilities\") pod \"redhat-marketplace-k9n99\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.730198 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7v4s\" (UniqueName: \"kubernetes.io/projected/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-kube-api-access-d7v4s\") pod \"redhat-marketplace-k9n99\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.832042 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-utilities\") pod \"redhat-marketplace-k9n99\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.832167 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7v4s\" (UniqueName: \"kubernetes.io/projected/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-kube-api-access-d7v4s\") pod \"redhat-marketplace-k9n99\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.832350 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-catalog-content\") pod \"redhat-marketplace-k9n99\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.832724 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-utilities\") pod \"redhat-marketplace-k9n99\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.832848 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-catalog-content\") pod \"redhat-marketplace-k9n99\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.851958 5014 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d7v4s\" (UniqueName: \"kubernetes.io/projected/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-kube-api-access-d7v4s\") pod \"redhat-marketplace-k9n99\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:03 crc kubenswrapper[5014]: I1205 11:56:03.941542 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:04 crc kubenswrapper[5014]: I1205 11:56:04.421962 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9n99"] Dec 05 11:56:05 crc kubenswrapper[5014]: I1205 11:56:05.266664 5014 generic.go:334] "Generic (PLEG): container finished" podID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerID="b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078" exitCode=0 Dec 05 11:56:05 crc kubenswrapper[5014]: I1205 11:56:05.266705 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9n99" event={"ID":"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce","Type":"ContainerDied","Data":"b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078"} Dec 05 11:56:05 crc kubenswrapper[5014]: I1205 11:56:05.266730 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9n99" event={"ID":"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce","Type":"ContainerStarted","Data":"8a961c040a4a292be00deaaaf520bdd4be6dd7d55ddcf145afc475872ff6fc1a"} Dec 05 11:56:05 crc kubenswrapper[5014]: I1205 11:56:05.268970 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 11:56:08 crc kubenswrapper[5014]: I1205 11:56:08.299526 5014 generic.go:334] "Generic (PLEG): container finished" podID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerID="767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c" exitCode=0 Dec 05 11:56:08 crc kubenswrapper[5014]: I1205 11:56:08.299667 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9n99" event={"ID":"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce","Type":"ContainerDied","Data":"767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c"} Dec 05 11:56:09 crc kubenswrapper[5014]: I1205 11:56:09.311254 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9n99" event={"ID":"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce","Type":"ContainerStarted","Data":"53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704"} Dec 05 11:56:09 crc kubenswrapper[5014]: I1205 11:56:09.334051 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k9n99" podStartSLOduration=2.818075095 podStartE2EDuration="6.334031518s" podCreationTimestamp="2025-12-05 11:56:03 +0000 UTC" firstStartedPulling="2025-12-05 11:56:05.268702545 +0000 UTC m=+4092.216820249" lastFinishedPulling="2025-12-05 11:56:08.784658958 +0000 UTC m=+4095.732776672" observedRunningTime="2025-12-05 11:56:09.333308051 +0000 UTC m=+4096.281425755" watchObservedRunningTime="2025-12-05 11:56:09.334031518 +0000 UTC m=+4096.282149232" Dec 05 11:56:13 crc kubenswrapper[5014]: I1205 11:56:13.941831 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:13 crc kubenswrapper[5014]: I1205 11:56:13.943903 5014 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:14 crc kubenswrapper[5014]: I1205 11:56:14.003123 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:14 crc kubenswrapper[5014]: I1205 11:56:14.318105 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:56:14 crc kubenswrapper[5014]: E1205 11:56:14.318740 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:56:14 crc kubenswrapper[5014]: I1205 11:56:14.419737 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:14 crc kubenswrapper[5014]: I1205 11:56:14.473380 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9n99"] Dec 05 11:56:16 crc kubenswrapper[5014]: I1205 11:56:16.367141 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k9n99" podUID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerName="registry-server" containerID="cri-o://53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704" gracePeriod=2 Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.343992 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.389460 5014 generic.go:334] "Generic (PLEG): container finished" podID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerID="53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704" exitCode=0 Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.389506 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9n99" event={"ID":"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce","Type":"ContainerDied","Data":"53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704"} Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.389537 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9n99" event={"ID":"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce","Type":"ContainerDied","Data":"8a961c040a4a292be00deaaaf520bdd4be6dd7d55ddcf145afc475872ff6fc1a"} Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.389541 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9n99" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.389554 5014 scope.go:117] "RemoveContainer" containerID="53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.410804 5014 scope.go:117] "RemoveContainer" containerID="767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.415800 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7v4s\" (UniqueName: \"kubernetes.io/projected/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-kube-api-access-d7v4s\") pod \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.415922 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-catalog-content\") pod \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.415946 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-utilities\") pod \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\" (UID: \"c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce\") " Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.418322 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-utilities" (OuterVolumeSpecName: "utilities") pod "c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" (UID: "c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.433188 5014 scope.go:117] "RemoveContainer" containerID="b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.438887 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-kube-api-access-d7v4s" (OuterVolumeSpecName: "kube-api-access-d7v4s") pod "c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" (UID: "c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce"). InnerVolumeSpecName "kube-api-access-d7v4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.448070 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" (UID: "c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.518984 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.519048 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.519061 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7v4s\" (UniqueName: \"kubernetes.io/projected/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce-kube-api-access-d7v4s\") on node \"crc\" DevicePath \"\"" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.525073 5014 scope.go:117] "RemoveContainer" containerID="53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704" Dec 05 11:56:17 crc kubenswrapper[5014]: E1205 11:56:17.525524 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704\": container with ID starting with 53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704 not found: ID does not exist" containerID="53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.525572 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704"} err="failed to get container status \"53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704\": rpc error: code = NotFound desc = could not find container \"53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704\": container with ID starting with 53cc04cd8e04e4abcada137d6b3439053f3732454411b372b7758d59b3c41704 not found: ID does not exist" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.525591 5014 scope.go:117] "RemoveContainer" containerID="767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c" Dec 05 11:56:17 crc kubenswrapper[5014]: E1205 11:56:17.525825 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c\": container with ID starting with 767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c not found: ID does not exist" containerID="767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.525901 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c"} err="failed to get container status \"767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c\": rpc error: code = NotFound desc = could not find container \"767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c\": container with ID starting with 767db4d4e38f38c09f8c4983e2d5f696fe306e7d663934b0873f53bfb4185a2c not found: ID does not exist" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.525926 5014 scope.go:117] "RemoveContainer" containerID="b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078" Dec 05 11:56:17 crc 
kubenswrapper[5014]: E1205 11:56:17.526173 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078\": container with ID starting with b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078 not found: ID does not exist" containerID="b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.526238 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078"} err="failed to get container status \"b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078\": rpc error: code = NotFound desc = could not find container \"b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078\": container with ID starting with b621bcd2d3801e6dc55707b49e14e76817681df4b387163441694ee3be0b6078 not found: ID does not exist" Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.729155 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9n99"] Dec 05 11:56:17 crc kubenswrapper[5014]: I1205 11:56:17.738612 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9n99"] Dec 05 11:56:19 crc kubenswrapper[5014]: I1205 11:56:19.329555 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" path="/var/lib/kubelet/pods/c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce/volumes" Dec 05 11:56:29 crc kubenswrapper[5014]: I1205 11:56:29.319052 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:56:29 crc kubenswrapper[5014]: E1205 11:56:29.319952 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:56:43 crc kubenswrapper[5014]: I1205 11:56:43.327062 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:56:43 crc kubenswrapper[5014]: E1205 11:56:43.327845 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:56:51 crc kubenswrapper[5014]: I1205 11:56:51.734126 5014 generic.go:334] "Generic (PLEG): container finished" podID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" containerID="b8134ced2afb7c58666fdc550ef86ad1ecec84cf848d0ef5f20b1bada58997ab" exitCode=0 Dec 05 11:56:51 crc kubenswrapper[5014]: I1205 11:56:51.734253 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rwdzw/must-gather-8c7pj" event={"ID":"37b6e3fb-a68e-4c8b-8472-cce8a2c29449","Type":"ContainerDied","Data":"b8134ced2afb7c58666fdc550ef86ad1ecec84cf848d0ef5f20b1bada58997ab"} Dec 
05 11:56:51 crc kubenswrapper[5014]: I1205 11:56:51.735193 5014 scope.go:117] "RemoveContainer" containerID="b8134ced2afb7c58666fdc550ef86ad1ecec84cf848d0ef5f20b1bada58997ab" Dec 05 11:56:52 crc kubenswrapper[5014]: I1205 11:56:52.416884 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rwdzw_must-gather-8c7pj_37b6e3fb-a68e-4c8b-8472-cce8a2c29449/gather/0.log" Dec 05 11:56:55 crc kubenswrapper[5014]: I1205 11:56:55.320842 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:56:55 crc kubenswrapper[5014]: E1205 11:56:55.321621 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 11:57:00 crc kubenswrapper[5014]: I1205 11:57:00.492630 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rwdzw/must-gather-8c7pj"] Dec 05 11:57:00 crc kubenswrapper[5014]: I1205 11:57:00.493440 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-rwdzw/must-gather-8c7pj" podUID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" containerName="copy" containerID="cri-o://45eae07d8ebf624e5a9f632e073b3e87f7554140ea8fcb706553c0a911989b58" gracePeriod=2 Dec 05 11:57:00 crc kubenswrapper[5014]: I1205 11:57:00.501338 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rwdzw/must-gather-8c7pj"] Dec 05 11:57:00 crc kubenswrapper[5014]: I1205 11:57:00.829195 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rwdzw_must-gather-8c7pj_37b6e3fb-a68e-4c8b-8472-cce8a2c29449/copy/0.log" Dec 05 11:57:00 crc kubenswrapper[5014]: I1205 11:57:00.829818 5014 generic.go:334] "Generic (PLEG): container finished" podID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" containerID="45eae07d8ebf624e5a9f632e073b3e87f7554140ea8fcb706553c0a911989b58" exitCode=143 Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.444169 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rwdzw_must-gather-8c7pj_37b6e3fb-a68e-4c8b-8472-cce8a2c29449/copy/0.log" Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.445030 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rwdzw/must-gather-8c7pj" Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.624787 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-must-gather-output\") pod \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\" (UID: \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\") " Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.624943 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwnqn\" (UniqueName: \"kubernetes.io/projected/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-kube-api-access-rwnqn\") pod \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\" (UID: \"37b6e3fb-a68e-4c8b-8472-cce8a2c29449\") " Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.641958 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-kube-api-access-rwnqn" (OuterVolumeSpecName: "kube-api-access-rwnqn") pod "37b6e3fb-a68e-4c8b-8472-cce8a2c29449" (UID: "37b6e3fb-a68e-4c8b-8472-cce8a2c29449"). InnerVolumeSpecName "kube-api-access-rwnqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.727519 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwnqn\" (UniqueName: \"kubernetes.io/projected/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-kube-api-access-rwnqn\") on node \"crc\" DevicePath \"\"" Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.780169 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "37b6e3fb-a68e-4c8b-8472-cce8a2c29449" (UID: "37b6e3fb-a68e-4c8b-8472-cce8a2c29449"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.829814 5014 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/37b6e3fb-a68e-4c8b-8472-cce8a2c29449-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.838989 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rwdzw_must-gather-8c7pj_37b6e3fb-a68e-4c8b-8472-cce8a2c29449/copy/0.log" Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.839402 5014 scope.go:117] "RemoveContainer" containerID="45eae07d8ebf624e5a9f632e073b3e87f7554140ea8fcb706553c0a911989b58" Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.839456 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rwdzw/must-gather-8c7pj" Dec 05 11:57:01 crc kubenswrapper[5014]: I1205 11:57:01.858079 5014 scope.go:117] "RemoveContainer" containerID="b8134ced2afb7c58666fdc550ef86ad1ecec84cf848d0ef5f20b1bada58997ab" Dec 05 11:57:03 crc kubenswrapper[5014]: I1205 11:57:03.331288 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" path="/var/lib/kubelet/pods/37b6e3fb-a68e-4c8b-8472-cce8a2c29449/volumes" Dec 05 11:57:08 crc kubenswrapper[5014]: I1205 11:57:08.318473 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 11:57:08 crc kubenswrapper[5014]: I1205 11:57:08.942743 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"1ac388921aa80740568e587b3d1015f94de996983c82d0ab08892e7a174f4b6e"} Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.022466 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sj7n6"] Dec 05 11:57:34 crc kubenswrapper[5014]: E1205 11:57:34.023545 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerName="registry-server" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.023560 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerName="registry-server" Dec 05 11:57:34 crc kubenswrapper[5014]: E1205 11:57:34.023584 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerName="extract-content" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.023592 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerName="extract-content" Dec 05 11:57:34 crc kubenswrapper[5014]: E1205 11:57:34.023627 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" containerName="gather" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.023635 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" containerName="gather" Dec 05 11:57:34 crc kubenswrapper[5014]: E1205 11:57:34.023645 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerName="extract-utilities" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.023654 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerName="extract-utilities" Dec 05 11:57:34 crc kubenswrapper[5014]: E1205 11:57:34.023680 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" containerName="copy" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.023688 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" containerName="copy" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.023868 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" containerName="copy" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.023885 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5ee3478-8ea9-46dd-8ef5-03b47b82b4ce" containerName="registry-server" Dec 05 11:57:34 
crc kubenswrapper[5014]: I1205 11:57:34.023899 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="37b6e3fb-a68e-4c8b-8472-cce8a2c29449" containerName="gather" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.025405 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.033059 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sj7n6"] Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.226400 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdf7k\" (UniqueName: \"kubernetes.io/projected/99cc608e-bc5b-4721-8b3e-385bdded4070-kube-api-access-kdf7k\") pod \"certified-operators-sj7n6\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.226452 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-utilities\") pod \"certified-operators-sj7n6\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.227329 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-catalog-content\") pod \"certified-operators-sj7n6\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.330314 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-catalog-content\") pod \"certified-operators-sj7n6\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.330475 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdf7k\" (UniqueName: \"kubernetes.io/projected/99cc608e-bc5b-4721-8b3e-385bdded4070-kube-api-access-kdf7k\") pod \"certified-operators-sj7n6\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.330532 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-utilities\") pod \"certified-operators-sj7n6\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.330972 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-utilities\") pod \"certified-operators-sj7n6\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.331076 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-catalog-content\") pod \"certified-operators-sj7n6\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.352178 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdf7k\" (UniqueName: \"kubernetes.io/projected/99cc608e-bc5b-4721-8b3e-385bdded4070-kube-api-access-kdf7k\") pod \"certified-operators-sj7n6\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:34 crc kubenswrapper[5014]: I1205 11:57:34.650045 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:35 crc kubenswrapper[5014]: I1205 11:57:35.313842 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sj7n6"] Dec 05 11:57:36 crc kubenswrapper[5014]: I1205 11:57:36.205090 5014 generic.go:334] "Generic (PLEG): container finished" podID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerID="407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508" exitCode=0 Dec 05 11:57:36 crc kubenswrapper[5014]: I1205 11:57:36.205380 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sj7n6" event={"ID":"99cc608e-bc5b-4721-8b3e-385bdded4070","Type":"ContainerDied","Data":"407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508"} Dec 05 11:57:36 crc kubenswrapper[5014]: I1205 11:57:36.205486 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sj7n6" event={"ID":"99cc608e-bc5b-4721-8b3e-385bdded4070","Type":"ContainerStarted","Data":"867550dd262f846d3b7723b40ff2ac9451c6765bfa6b000779671fba6f7986e3"} Dec 05 11:57:38 crc kubenswrapper[5014]: I1205 11:57:38.227302 5014 generic.go:334] "Generic (PLEG): container finished" podID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerID="dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8" exitCode=0 Dec 05 11:57:38 crc kubenswrapper[5014]: I1205 11:57:38.227380 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sj7n6" event={"ID":"99cc608e-bc5b-4721-8b3e-385bdded4070","Type":"ContainerDied","Data":"dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8"} Dec 05 11:57:39 crc kubenswrapper[5014]: I1205 11:57:39.240072 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sj7n6" event={"ID":"99cc608e-bc5b-4721-8b3e-385bdded4070","Type":"ContainerStarted","Data":"07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0"} Dec 05 11:57:39 crc kubenswrapper[5014]: I1205 11:57:39.256649 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sj7n6" podStartSLOduration=3.690343302 podStartE2EDuration="6.256628103s" podCreationTimestamp="2025-12-05 11:57:33 +0000 UTC" firstStartedPulling="2025-12-05 11:57:36.206789273 +0000 UTC m=+4183.154906997" lastFinishedPulling="2025-12-05 11:57:38.773074094 +0000 UTC m=+4185.721191798" observedRunningTime="2025-12-05 11:57:39.25530528 +0000 UTC m=+4186.203423004" watchObservedRunningTime="2025-12-05 11:57:39.256628103 +0000 UTC m=+4186.204745807" Dec 05 11:57:44 crc kubenswrapper[5014]: I1205 11:57:44.651425 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:44 crc kubenswrapper[5014]: I1205 11:57:44.652143 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:45 crc kubenswrapper[5014]: I1205 11:57:45.111901 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:45 crc kubenswrapper[5014]: I1205 11:57:45.337174 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:45 crc kubenswrapper[5014]: I1205 11:57:45.386159 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sj7n6"] Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.304917 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sj7n6" podUID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerName="registry-server" containerID="cri-o://07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0" gracePeriod=2 Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.750043 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.837436 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdf7k\" (UniqueName: \"kubernetes.io/projected/99cc608e-bc5b-4721-8b3e-385bdded4070-kube-api-access-kdf7k\") pod \"99cc608e-bc5b-4721-8b3e-385bdded4070\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.837589 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-utilities\") pod \"99cc608e-bc5b-4721-8b3e-385bdded4070\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.837717 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-catalog-content\") pod \"99cc608e-bc5b-4721-8b3e-385bdded4070\" (UID: \"99cc608e-bc5b-4721-8b3e-385bdded4070\") " Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.838418 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-utilities" (OuterVolumeSpecName: "utilities") pod "99cc608e-bc5b-4721-8b3e-385bdded4070" (UID: "99cc608e-bc5b-4721-8b3e-385bdded4070"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.844088 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99cc608e-bc5b-4721-8b3e-385bdded4070-kube-api-access-kdf7k" (OuterVolumeSpecName: "kube-api-access-kdf7k") pod "99cc608e-bc5b-4721-8b3e-385bdded4070" (UID: "99cc608e-bc5b-4721-8b3e-385bdded4070"). InnerVolumeSpecName "kube-api-access-kdf7k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.902951 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99cc608e-bc5b-4721-8b3e-385bdded4070" (UID: "99cc608e-bc5b-4721-8b3e-385bdded4070"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.939783 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.939819 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdf7k\" (UniqueName: \"kubernetes.io/projected/99cc608e-bc5b-4721-8b3e-385bdded4070-kube-api-access-kdf7k\") on node \"crc\" DevicePath \"\"" Dec 05 11:57:47 crc kubenswrapper[5014]: I1205 11:57:47.939833 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99cc608e-bc5b-4721-8b3e-385bdded4070-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.318200 5014 generic.go:334] "Generic (PLEG): container finished" podID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerID="07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0" exitCode=0 Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.318244 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sj7n6" event={"ID":"99cc608e-bc5b-4721-8b3e-385bdded4070","Type":"ContainerDied","Data":"07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0"} Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.318351 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sj7n6" event={"ID":"99cc608e-bc5b-4721-8b3e-385bdded4070","Type":"ContainerDied","Data":"867550dd262f846d3b7723b40ff2ac9451c6765bfa6b000779671fba6f7986e3"} Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.318377 5014 scope.go:117] "RemoveContainer" containerID="07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.318459 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sj7n6" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.342750 5014 scope.go:117] "RemoveContainer" containerID="dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.354861 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sj7n6"] Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.362915 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sj7n6"] Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.384121 5014 scope.go:117] "RemoveContainer" containerID="407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.411575 5014 scope.go:117] "RemoveContainer" containerID="07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0" Dec 05 11:57:48 crc kubenswrapper[5014]: E1205 11:57:48.412075 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0\": container with ID starting with 07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0 not found: ID does not exist" containerID="07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.412119 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0"} err="failed to get container status \"07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0\": rpc error: code = NotFound desc = could not find container \"07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0\": container with ID starting with 07f3fd39bf01b71018fbd310737f0e911f3ed86fdcbd586a2c1a146c11b1f9d0 not found: ID does not exist" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.412146 5014 scope.go:117] "RemoveContainer" containerID="dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8" Dec 05 11:57:48 crc kubenswrapper[5014]: E1205 11:57:48.412486 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8\": container with ID starting with dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8 not found: ID does not exist" containerID="dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.412518 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8"} err="failed to get container status \"dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8\": rpc error: code = NotFound desc = could not find container \"dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8\": container with ID starting with dce96f340746246e0ad64dedebe7fdc60755a2de323340bd7e8810f8c4c177f8 not found: ID does not exist" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.412542 5014 scope.go:117] "RemoveContainer" containerID="407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508" Dec 05 11:57:48 crc kubenswrapper[5014]: E1205 11:57:48.412937 5014 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508\": container with ID starting with 407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508 not found: ID does not exist" containerID="407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508" Dec 05 11:57:48 crc kubenswrapper[5014]: I1205 11:57:48.412966 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508"} err="failed to get container status \"407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508\": rpc error: code = NotFound desc = could not find container \"407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508\": container with ID starting with 407a1eb611860c298607def1bb3fb3e8c43e624ea95f9c6c00563742e5920508 not found: ID does not exist" Dec 05 11:57:49 crc kubenswrapper[5014]: I1205 11:57:49.333656 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99cc608e-bc5b-4721-8b3e-385bdded4070" path="/var/lib/kubelet/pods/99cc608e-bc5b-4721-8b3e-385bdded4070/volumes" Dec 05 11:58:06 crc kubenswrapper[5014]: I1205 11:58:06.744929 5014 scope.go:117] "RemoveContainer" containerID="ea5e705e09a6ed1701d13ebe302efe0ece796cdcdad7ac93e003fc8430de371b" Dec 05 11:59:32 crc kubenswrapper[5014]: I1205 11:59:32.936400 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 11:59:32 crc kubenswrapper[5014]: I1205 11:59:32.937047 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.186142 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr"] Dec 05 12:00:00 crc kubenswrapper[5014]: E1205 12:00:00.191941 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerName="extract-utilities" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.191978 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerName="extract-utilities" Dec 05 12:00:00 crc kubenswrapper[5014]: E1205 12:00:00.192027 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerName="extract-content" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.192036 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerName="extract-content" Dec 05 12:00:00 crc kubenswrapper[5014]: E1205 12:00:00.192047 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerName="registry-server" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.192057 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerName="registry-server" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 
12:00:00.192322 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="99cc608e-bc5b-4721-8b3e-385bdded4070" containerName="registry-server" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.193089 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.195623 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.195796 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.197412 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr"] Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.337540 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-secret-volume\") pod \"collect-profiles-29415600-9wxvr\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.337716 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg9gp\" (UniqueName: \"kubernetes.io/projected/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-kube-api-access-gg9gp\") pod \"collect-profiles-29415600-9wxvr\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.337782 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-config-volume\") pod \"collect-profiles-29415600-9wxvr\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.439184 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg9gp\" (UniqueName: \"kubernetes.io/projected/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-kube-api-access-gg9gp\") pod \"collect-profiles-29415600-9wxvr\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.439252 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-config-volume\") pod \"collect-profiles-29415600-9wxvr\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.439340 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-secret-volume\") pod \"collect-profiles-29415600-9wxvr\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.440663 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-config-volume\") pod \"collect-profiles-29415600-9wxvr\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.445326 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-secret-volume\") pod \"collect-profiles-29415600-9wxvr\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.457326 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gg9gp\" (UniqueName: \"kubernetes.io/projected/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-kube-api-access-gg9gp\") pod \"collect-profiles-29415600-9wxvr\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.522464 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:00 crc kubenswrapper[5014]: I1205 12:00:00.940934 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr"] Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.186730 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-48jk6/must-gather-flvrt"] Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.188619 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.191229 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-48jk6"/"openshift-service-ca.crt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.193142 5014 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-48jk6"/"kube-root-ca.crt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.259466 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8d7c7d58-bc06-4985-97da-55244710418f-must-gather-output\") pod \"must-gather-flvrt\" (UID: \"8d7c7d58-bc06-4985-97da-55244710418f\") " pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.259828 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgk2n\" (UniqueName: \"kubernetes.io/projected/8d7c7d58-bc06-4985-97da-55244710418f-kube-api-access-xgk2n\") pod \"must-gather-flvrt\" (UID: \"8d7c7d58-bc06-4985-97da-55244710418f\") " pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.361322 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8d7c7d58-bc06-4985-97da-55244710418f-must-gather-output\") pod \"must-gather-flvrt\" (UID: \"8d7c7d58-bc06-4985-97da-55244710418f\") " pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.361449 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgk2n\" (UniqueName: \"kubernetes.io/projected/8d7c7d58-bc06-4985-97da-55244710418f-kube-api-access-xgk2n\") pod \"must-gather-flvrt\" (UID: \"8d7c7d58-bc06-4985-97da-55244710418f\") " pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.361867 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8d7c7d58-bc06-4985-97da-55244710418f-must-gather-output\") pod \"must-gather-flvrt\" (UID: \"8d7c7d58-bc06-4985-97da-55244710418f\") " pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.382111 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgk2n\" (UniqueName: \"kubernetes.io/projected/8d7c7d58-bc06-4985-97da-55244710418f-kube-api-access-xgk2n\") pod \"must-gather-flvrt\" (UID: \"8d7c7d58-bc06-4985-97da-55244710418f\") " pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.542470 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.574596 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-48jk6/must-gather-flvrt"] Dec 05 12:00:01 crc kubenswrapper[5014]: I1205 12:00:01.602987 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" event={"ID":"9efc8ee9-cd7a-4688-ae6d-890838a0ad72","Type":"ContainerStarted","Data":"77a9a3554ba773b9fde27573f55fc66f53b33d526e95bdf466528b9b02e989f9"} Dec 05 12:00:02 crc kubenswrapper[5014]: I1205 12:00:02.084916 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-48jk6/must-gather-flvrt"] Dec 05 12:00:02 crc kubenswrapper[5014]: W1205 12:00:02.087973 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d7c7d58_bc06_4985_97da_55244710418f.slice/crio-7523ef978b4b914f38a9969821697c4a860707cc541e17add0d9579d02fc9669 WatchSource:0}: Error finding container 7523ef978b4b914f38a9969821697c4a860707cc541e17add0d9579d02fc9669: Status 404 returned error can't find the container with id 7523ef978b4b914f38a9969821697c4a860707cc541e17add0d9579d02fc9669 Dec 05 12:00:02 crc kubenswrapper[5014]: I1205 12:00:02.631836 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/must-gather-flvrt" event={"ID":"8d7c7d58-bc06-4985-97da-55244710418f","Type":"ContainerStarted","Data":"4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a"} Dec 05 12:00:02 crc kubenswrapper[5014]: I1205 12:00:02.632118 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/must-gather-flvrt" event={"ID":"8d7c7d58-bc06-4985-97da-55244710418f","Type":"ContainerStarted","Data":"7523ef978b4b914f38a9969821697c4a860707cc541e17add0d9579d02fc9669"} Dec 05 12:00:02 crc kubenswrapper[5014]: I1205 12:00:02.644875 5014 generic.go:334] "Generic (PLEG): container finished" podID="9efc8ee9-cd7a-4688-ae6d-890838a0ad72" containerID="93bdd8b42eeb817390325f267aab26fb1018fea40b4b8bd0fa23d44461f810ee" exitCode=0 Dec 05 12:00:02 crc kubenswrapper[5014]: I1205 12:00:02.644983 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" event={"ID":"9efc8ee9-cd7a-4688-ae6d-890838a0ad72","Type":"ContainerDied","Data":"93bdd8b42eeb817390325f267aab26fb1018fea40b4b8bd0fa23d44461f810ee"} Dec 05 12:00:02 crc kubenswrapper[5014]: I1205 12:00:02.937015 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:00:02 crc kubenswrapper[5014]: I1205 12:00:02.937091 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:00:03 crc kubenswrapper[5014]: I1205 12:00:03.654954 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/must-gather-flvrt" 
event={"ID":"8d7c7d58-bc06-4985-97da-55244710418f","Type":"ContainerStarted","Data":"a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5"} Dec 05 12:00:03 crc kubenswrapper[5014]: I1205 12:00:03.670402 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-48jk6/must-gather-flvrt" podStartSLOduration=2.670381579 podStartE2EDuration="2.670381579s" podCreationTimestamp="2025-12-05 12:00:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:00:03.66797959 +0000 UTC m=+4330.616097314" watchObservedRunningTime="2025-12-05 12:00:03.670381579 +0000 UTC m=+4330.618499283" Dec 05 12:00:03 crc kubenswrapper[5014]: I1205 12:00:03.997680 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.116240 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gg9gp\" (UniqueName: \"kubernetes.io/projected/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-kube-api-access-gg9gp\") pod \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.116473 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-secret-volume\") pod \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.116559 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-config-volume\") pod \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\" (UID: \"9efc8ee9-cd7a-4688-ae6d-890838a0ad72\") " Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.118216 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-config-volume" (OuterVolumeSpecName: "config-volume") pod "9efc8ee9-cd7a-4688-ae6d-890838a0ad72" (UID: "9efc8ee9-cd7a-4688-ae6d-890838a0ad72"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.125530 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-kube-api-access-gg9gp" (OuterVolumeSpecName: "kube-api-access-gg9gp") pod "9efc8ee9-cd7a-4688-ae6d-890838a0ad72" (UID: "9efc8ee9-cd7a-4688-ae6d-890838a0ad72"). InnerVolumeSpecName "kube-api-access-gg9gp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.131398 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9efc8ee9-cd7a-4688-ae6d-890838a0ad72" (UID: "9efc8ee9-cd7a-4688-ae6d-890838a0ad72"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.218867 5014 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.219173 5014 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.219188 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gg9gp\" (UniqueName: \"kubernetes.io/projected/9efc8ee9-cd7a-4688-ae6d-890838a0ad72-kube-api-access-gg9gp\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.667296 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" event={"ID":"9efc8ee9-cd7a-4688-ae6d-890838a0ad72","Type":"ContainerDied","Data":"77a9a3554ba773b9fde27573f55fc66f53b33d526e95bdf466528b9b02e989f9"} Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.667356 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77a9a3554ba773b9fde27573f55fc66f53b33d526e95bdf466528b9b02e989f9" Dec 05 12:00:04 crc kubenswrapper[5014]: I1205 12:00:04.667368 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415600-9wxvr" Dec 05 12:00:05 crc kubenswrapper[5014]: I1205 12:00:05.085204 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw"] Dec 05 12:00:05 crc kubenswrapper[5014]: I1205 12:00:05.095980 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415555-sx2lw"] Dec 05 12:00:05 crc kubenswrapper[5014]: I1205 12:00:05.330710 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0ad9bdc-7eeb-451e-a9ed-cb7421206906" path="/var/lib/kubelet/pods/a0ad9bdc-7eeb-451e-a9ed-cb7421206906/volumes" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.271947 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-48jk6/crc-debug-fcwl8"] Dec 05 12:00:06 crc kubenswrapper[5014]: E1205 12:00:06.272494 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9efc8ee9-cd7a-4688-ae6d-890838a0ad72" containerName="collect-profiles" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.272513 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="9efc8ee9-cd7a-4688-ae6d-890838a0ad72" containerName="collect-profiles" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.272787 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="9efc8ee9-cd7a-4688-ae6d-890838a0ad72" containerName="collect-profiles" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.273628 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-fcwl8" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.276238 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-48jk6"/"default-dockercfg-rtqqj" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.358457 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8bx6\" (UniqueName: \"kubernetes.io/projected/e1b2d95b-eefd-4667-9087-ba1efe9979cb-kube-api-access-n8bx6\") pod \"crc-debug-fcwl8\" (UID: \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\") " pod="openshift-must-gather-48jk6/crc-debug-fcwl8" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.358577 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1b2d95b-eefd-4667-9087-ba1efe9979cb-host\") pod \"crc-debug-fcwl8\" (UID: \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\") " pod="openshift-must-gather-48jk6/crc-debug-fcwl8" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.460340 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1b2d95b-eefd-4667-9087-ba1efe9979cb-host\") pod \"crc-debug-fcwl8\" (UID: \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\") " pod="openshift-must-gather-48jk6/crc-debug-fcwl8" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.460725 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8bx6\" (UniqueName: \"kubernetes.io/projected/e1b2d95b-eefd-4667-9087-ba1efe9979cb-kube-api-access-n8bx6\") pod \"crc-debug-fcwl8\" (UID: \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\") " pod="openshift-must-gather-48jk6/crc-debug-fcwl8" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.460525 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1b2d95b-eefd-4667-9087-ba1efe9979cb-host\") pod \"crc-debug-fcwl8\" (UID: \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\") " pod="openshift-must-gather-48jk6/crc-debug-fcwl8" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.481404 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8bx6\" (UniqueName: \"kubernetes.io/projected/e1b2d95b-eefd-4667-9087-ba1efe9979cb-kube-api-access-n8bx6\") pod \"crc-debug-fcwl8\" (UID: \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\") " pod="openshift-must-gather-48jk6/crc-debug-fcwl8" Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.595099 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-fcwl8" Dec 05 12:00:06 crc kubenswrapper[5014]: W1205 12:00:06.625408 5014 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1b2d95b_eefd_4667_9087_ba1efe9979cb.slice/crio-29c7290676551ad2c34a91816eedd79473d29308d0d274f23ce6eec385a69c16 WatchSource:0}: Error finding container 29c7290676551ad2c34a91816eedd79473d29308d0d274f23ce6eec385a69c16: Status 404 returned error can't find the container with id 29c7290676551ad2c34a91816eedd79473d29308d0d274f23ce6eec385a69c16 Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.687780 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/crc-debug-fcwl8" event={"ID":"e1b2d95b-eefd-4667-9087-ba1efe9979cb","Type":"ContainerStarted","Data":"29c7290676551ad2c34a91816eedd79473d29308d0d274f23ce6eec385a69c16"} Dec 05 12:00:06 crc kubenswrapper[5014]: I1205 12:00:06.833948 5014 scope.go:117] "RemoveContainer" containerID="47dc1f1a90e9825137b7d99f8474b8ff1492d25eabad65fec236fc4ad9ab4959" Dec 05 12:00:07 crc kubenswrapper[5014]: I1205 12:00:07.698331 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/crc-debug-fcwl8" event={"ID":"e1b2d95b-eefd-4667-9087-ba1efe9979cb","Type":"ContainerStarted","Data":"1f520a8510a30793f9d2a83a24e0bb0252dcb43e2e387841e16bab0e3ea54f17"} Dec 05 12:00:07 crc kubenswrapper[5014]: I1205 12:00:07.728962 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-48jk6/crc-debug-fcwl8" podStartSLOduration=1.728938643 podStartE2EDuration="1.728938643s" podCreationTimestamp="2025-12-05 12:00:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:00:07.7226419 +0000 UTC m=+4334.670759604" watchObservedRunningTime="2025-12-05 12:00:07.728938643 +0000 UTC m=+4334.677056347" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.233395 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7ln9h"] Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.235614 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.257853 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7ln9h"] Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.346327 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-utilities\") pod \"community-operators-7ln9h\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.346435 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vqbv\" (UniqueName: \"kubernetes.io/projected/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-kube-api-access-9vqbv\") pod \"community-operators-7ln9h\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.346506 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-catalog-content\") pod \"community-operators-7ln9h\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.448344 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-catalog-content\") pod \"community-operators-7ln9h\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.448490 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-utilities\") pod \"community-operators-7ln9h\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.448527 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vqbv\" (UniqueName: \"kubernetes.io/projected/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-kube-api-access-9vqbv\") pod \"community-operators-7ln9h\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.448812 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-catalog-content\") pod \"community-operators-7ln9h\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.448912 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-utilities\") pod \"community-operators-7ln9h\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.467031 5014 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9vqbv\" (UniqueName: \"kubernetes.io/projected/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-kube-api-access-9vqbv\") pod \"community-operators-7ln9h\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:31 crc kubenswrapper[5014]: I1205 12:00:31.569377 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:32 crc kubenswrapper[5014]: I1205 12:00:32.112084 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7ln9h"] Dec 05 12:00:32 crc kubenswrapper[5014]: I1205 12:00:32.936670 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:00:32 crc kubenswrapper[5014]: I1205 12:00:32.937053 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:00:32 crc kubenswrapper[5014]: I1205 12:00:32.937117 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 12:00:32 crc kubenswrapper[5014]: I1205 12:00:32.937889 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1ac388921aa80740568e587b3d1015f94de996983c82d0ab08892e7a174f4b6e"} pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:00:32 crc kubenswrapper[5014]: I1205 12:00:32.937955 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://1ac388921aa80740568e587b3d1015f94de996983c82d0ab08892e7a174f4b6e" gracePeriod=600 Dec 05 12:00:32 crc kubenswrapper[5014]: I1205 12:00:32.946362 5014 generic.go:334] "Generic (PLEG): container finished" podID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerID="8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396" exitCode=0 Dec 05 12:00:32 crc kubenswrapper[5014]: I1205 12:00:32.946413 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ln9h" event={"ID":"68262d79-8bd7-4bb7-b46c-7395a5f63f6f","Type":"ContainerDied","Data":"8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396"} Dec 05 12:00:32 crc kubenswrapper[5014]: I1205 12:00:32.946444 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ln9h" event={"ID":"68262d79-8bd7-4bb7-b46c-7395a5f63f6f","Type":"ContainerStarted","Data":"1796836a6faa9e3275ac223a1844f9a52283d5b3c6bb4afacaf823c5711bda35"} Dec 05 12:00:33 crc kubenswrapper[5014]: I1205 12:00:33.957986 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ln9h" 
event={"ID":"68262d79-8bd7-4bb7-b46c-7395a5f63f6f","Type":"ContainerStarted","Data":"5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49"} Dec 05 12:00:33 crc kubenswrapper[5014]: I1205 12:00:33.961487 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="1ac388921aa80740568e587b3d1015f94de996983c82d0ab08892e7a174f4b6e" exitCode=0 Dec 05 12:00:33 crc kubenswrapper[5014]: I1205 12:00:33.961521 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"1ac388921aa80740568e587b3d1015f94de996983c82d0ab08892e7a174f4b6e"} Dec 05 12:00:33 crc kubenswrapper[5014]: I1205 12:00:33.961538 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea"} Dec 05 12:00:33 crc kubenswrapper[5014]: I1205 12:00:33.961556 5014 scope.go:117] "RemoveContainer" containerID="5125da642c9ef95730d1e7fe4dc05eef72fd7a2839235274cefbcf4c4740fe35" Dec 05 12:00:34 crc kubenswrapper[5014]: I1205 12:00:34.971098 5014 generic.go:334] "Generic (PLEG): container finished" podID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerID="5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49" exitCode=0 Dec 05 12:00:34 crc kubenswrapper[5014]: I1205 12:00:34.971140 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ln9h" event={"ID":"68262d79-8bd7-4bb7-b46c-7395a5f63f6f","Type":"ContainerDied","Data":"5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49"} Dec 05 12:00:35 crc kubenswrapper[5014]: I1205 12:00:35.984679 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ln9h" event={"ID":"68262d79-8bd7-4bb7-b46c-7395a5f63f6f","Type":"ContainerStarted","Data":"f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16"} Dec 05 12:00:36 crc kubenswrapper[5014]: I1205 12:00:36.008988 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7ln9h" podStartSLOduration=2.512360792 podStartE2EDuration="5.00897164s" podCreationTimestamp="2025-12-05 12:00:31 +0000 UTC" firstStartedPulling="2025-12-05 12:00:32.948066155 +0000 UTC m=+4359.896183859" lastFinishedPulling="2025-12-05 12:00:35.444676983 +0000 UTC m=+4362.392794707" observedRunningTime="2025-12-05 12:00:36.005601938 +0000 UTC m=+4362.953719652" watchObservedRunningTime="2025-12-05 12:00:36.00897164 +0000 UTC m=+4362.957089344" Dec 05 12:00:41 crc kubenswrapper[5014]: I1205 12:00:41.029796 5014 generic.go:334] "Generic (PLEG): container finished" podID="e1b2d95b-eefd-4667-9087-ba1efe9979cb" containerID="1f520a8510a30793f9d2a83a24e0bb0252dcb43e2e387841e16bab0e3ea54f17" exitCode=0 Dec 05 12:00:41 crc kubenswrapper[5014]: I1205 12:00:41.029884 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/crc-debug-fcwl8" event={"ID":"e1b2d95b-eefd-4667-9087-ba1efe9979cb","Type":"ContainerDied","Data":"1f520a8510a30793f9d2a83a24e0bb0252dcb43e2e387841e16bab0e3ea54f17"} Dec 05 12:00:41 crc kubenswrapper[5014]: I1205 12:00:41.570240 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7ln9h" 
Dec 05 12:00:41 crc kubenswrapper[5014]: I1205 12:00:41.570578 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7ln9h"
Dec 05 12:00:41 crc kubenswrapper[5014]: I1205 12:00:41.634419 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7ln9h"
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.081451 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7ln9h"
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.147056 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-fcwl8"
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.153199 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7ln9h"]
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.187208 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-48jk6/crc-debug-fcwl8"]
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.197888 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-48jk6/crc-debug-fcwl8"]
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.248661 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1b2d95b-eefd-4667-9087-ba1efe9979cb-host\") pod \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\" (UID: \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\") "
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.248806 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e1b2d95b-eefd-4667-9087-ba1efe9979cb-host" (OuterVolumeSpecName: "host") pod "e1b2d95b-eefd-4667-9087-ba1efe9979cb" (UID: "e1b2d95b-eefd-4667-9087-ba1efe9979cb"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.248929 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8bx6\" (UniqueName: \"kubernetes.io/projected/e1b2d95b-eefd-4667-9087-ba1efe9979cb-kube-api-access-n8bx6\") pod \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\" (UID: \"e1b2d95b-eefd-4667-9087-ba1efe9979cb\") "
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.249412 5014 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1b2d95b-eefd-4667-9087-ba1efe9979cb-host\") on node \"crc\" DevicePath \"\""
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.256339 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1b2d95b-eefd-4667-9087-ba1efe9979cb-kube-api-access-n8bx6" (OuterVolumeSpecName: "kube-api-access-n8bx6") pod "e1b2d95b-eefd-4667-9087-ba1efe9979cb" (UID: "e1b2d95b-eefd-4667-9087-ba1efe9979cb"). InnerVolumeSpecName "kube-api-access-n8bx6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:00:42 crc kubenswrapper[5014]: I1205 12:00:42.350671 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8bx6\" (UniqueName: \"kubernetes.io/projected/e1b2d95b-eefd-4667-9087-ba1efe9979cb-kube-api-access-n8bx6\") on node \"crc\" DevicePath \"\""
Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.052135 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29c7290676551ad2c34a91816eedd79473d29308d0d274f23ce6eec385a69c16"
Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.052166 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-fcwl8"
Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.327742 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1b2d95b-eefd-4667-9087-ba1efe9979cb" path="/var/lib/kubelet/pods/e1b2d95b-eefd-4667-9087-ba1efe9979cb/volumes"
Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.345353 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-48jk6/crc-debug-2sqb9"]
Dec 05 12:00:43 crc kubenswrapper[5014]: E1205 12:00:43.345956 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1b2d95b-eefd-4667-9087-ba1efe9979cb" containerName="container-00"
Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.346034 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1b2d95b-eefd-4667-9087-ba1efe9979cb" containerName="container-00"
Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.346330 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1b2d95b-eefd-4667-9087-ba1efe9979cb" containerName="container-00"
Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.349397 5014 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-48jk6"/"default-dockercfg-rtqqj" Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.472086 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np54r\" (UniqueName: \"kubernetes.io/projected/0595547b-e59e-49d0-9f2f-05103890af0b-kube-api-access-np54r\") pod \"crc-debug-2sqb9\" (UID: \"0595547b-e59e-49d0-9f2f-05103890af0b\") " pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.472208 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0595547b-e59e-49d0-9f2f-05103890af0b-host\") pod \"crc-debug-2sqb9\" (UID: \"0595547b-e59e-49d0-9f2f-05103890af0b\") " pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.573796 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np54r\" (UniqueName: \"kubernetes.io/projected/0595547b-e59e-49d0-9f2f-05103890af0b-kube-api-access-np54r\") pod \"crc-debug-2sqb9\" (UID: \"0595547b-e59e-49d0-9f2f-05103890af0b\") " pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.573901 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0595547b-e59e-49d0-9f2f-05103890af0b-host\") pod \"crc-debug-2sqb9\" (UID: \"0595547b-e59e-49d0-9f2f-05103890af0b\") " pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.574071 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0595547b-e59e-49d0-9f2f-05103890af0b-host\") pod \"crc-debug-2sqb9\" (UID: \"0595547b-e59e-49d0-9f2f-05103890af0b\") " pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.594048 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np54r\" (UniqueName: \"kubernetes.io/projected/0595547b-e59e-49d0-9f2f-05103890af0b-kube-api-access-np54r\") pod \"crc-debug-2sqb9\" (UID: \"0595547b-e59e-49d0-9f2f-05103890af0b\") " pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:43 crc kubenswrapper[5014]: I1205 12:00:43.662442 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.061099 5014 generic.go:334] "Generic (PLEG): container finished" podID="0595547b-e59e-49d0-9f2f-05103890af0b" containerID="c87be00b60cd7c9645c55dcb6540eb21209a6398a5efb45b2a9c7c910b47d861" exitCode=0 Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.061179 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/crc-debug-2sqb9" event={"ID":"0595547b-e59e-49d0-9f2f-05103890af0b","Type":"ContainerDied","Data":"c87be00b60cd7c9645c55dcb6540eb21209a6398a5efb45b2a9c7c910b47d861"} Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.061218 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/crc-debug-2sqb9" event={"ID":"0595547b-e59e-49d0-9f2f-05103890af0b","Type":"ContainerStarted","Data":"e3f88c56e319131270415d38bfa0dfa796bd6fafab577cdb51585420fdb9d37e"} Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.061344 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7ln9h" podUID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerName="registry-server" containerID="cri-o://f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16" gracePeriod=2 Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.484863 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-48jk6/crc-debug-2sqb9"] Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.487433 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.498663 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-48jk6/crc-debug-2sqb9"] Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.593130 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vqbv\" (UniqueName: \"kubernetes.io/projected/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-kube-api-access-9vqbv\") pod \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.593210 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-utilities\") pod \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.593416 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-catalog-content\") pod \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\" (UID: \"68262d79-8bd7-4bb7-b46c-7395a5f63f6f\") " Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.595348 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-utilities" (OuterVolumeSpecName: "utilities") pod "68262d79-8bd7-4bb7-b46c-7395a5f63f6f" (UID: "68262d79-8bd7-4bb7-b46c-7395a5f63f6f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.603490 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-kube-api-access-9vqbv" (OuterVolumeSpecName: "kube-api-access-9vqbv") pod "68262d79-8bd7-4bb7-b46c-7395a5f63f6f" (UID: "68262d79-8bd7-4bb7-b46c-7395a5f63f6f"). InnerVolumeSpecName "kube-api-access-9vqbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.696244 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vqbv\" (UniqueName: \"kubernetes.io/projected/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-kube-api-access-9vqbv\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.696306 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.736414 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "68262d79-8bd7-4bb7-b46c-7395a5f63f6f" (UID: "68262d79-8bd7-4bb7-b46c-7395a5f63f6f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:00:44 crc kubenswrapper[5014]: I1205 12:00:44.798785 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68262d79-8bd7-4bb7-b46c-7395a5f63f6f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.077582 5014 generic.go:334] "Generic (PLEG): container finished" podID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerID="f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16" exitCode=0 Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.077642 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7ln9h" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.077665 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ln9h" event={"ID":"68262d79-8bd7-4bb7-b46c-7395a5f63f6f","Type":"ContainerDied","Data":"f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16"} Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.078073 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ln9h" event={"ID":"68262d79-8bd7-4bb7-b46c-7395a5f63f6f","Type":"ContainerDied","Data":"1796836a6faa9e3275ac223a1844f9a52283d5b3c6bb4afacaf823c5711bda35"} Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.078103 5014 scope.go:117] "RemoveContainer" containerID="f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.191853 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.207097 5014 scope.go:117] "RemoveContainer" containerID="5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.216012 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7ln9h"] Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.231425 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7ln9h"] Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.232893 5014 scope.go:117] "RemoveContainer" containerID="8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.267111 5014 scope.go:117] "RemoveContainer" containerID="f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16" Dec 05 12:00:45 crc kubenswrapper[5014]: E1205 12:00:45.267618 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16\": container with ID starting with f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16 not found: ID does not exist" containerID="f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.267703 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16"} err="failed to get container status \"f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16\": rpc error: code = NotFound desc = could not find container \"f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16\": container with ID starting with f4a40f6d0d750ce1f7a1e5325555b525cfdbc3cf6dffed1e3e11d55871c73e16 not found: ID does not exist" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.267742 5014 scope.go:117] "RemoveContainer" containerID="5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49" Dec 05 12:00:45 crc kubenswrapper[5014]: E1205 12:00:45.268156 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49\": container with ID starting with 5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49 not found: ID does not exist" containerID="5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.268180 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49"} err="failed to get container status \"5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49\": rpc error: code = NotFound desc = could not find container \"5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49\": container with ID starting with 5a7274d58f06c81e0192b7008e940e21906b58fdd73bc877b78cbc761e1c5c49 not found: ID does not exist" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.268196 5014 scope.go:117] "RemoveContainer" containerID="8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396" Dec 05 12:00:45 crc kubenswrapper[5014]: E1205 12:00:45.268469 5014 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396\": container with ID starting with 8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396 not found: ID does not exist" containerID="8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.268511 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396"} err="failed to get container status \"8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396\": rpc error: code = NotFound desc = could not find container \"8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396\": container with ID starting with 8c26364d7ede539ced07f6f1b109ed7af456e8ef0c2a44c747af8879059ec396 not found: ID does not exist" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.306354 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0595547b-e59e-49d0-9f2f-05103890af0b-host\") pod \"0595547b-e59e-49d0-9f2f-05103890af0b\" (UID: \"0595547b-e59e-49d0-9f2f-05103890af0b\") " Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.306492 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0595547b-e59e-49d0-9f2f-05103890af0b-host" (OuterVolumeSpecName: "host") pod "0595547b-e59e-49d0-9f2f-05103890af0b" (UID: "0595547b-e59e-49d0-9f2f-05103890af0b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.306689 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np54r\" (UniqueName: \"kubernetes.io/projected/0595547b-e59e-49d0-9f2f-05103890af0b-kube-api-access-np54r\") pod \"0595547b-e59e-49d0-9f2f-05103890af0b\" (UID: \"0595547b-e59e-49d0-9f2f-05103890af0b\") " Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.307092 5014 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0595547b-e59e-49d0-9f2f-05103890af0b-host\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.311303 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0595547b-e59e-49d0-9f2f-05103890af0b-kube-api-access-np54r" (OuterVolumeSpecName: "kube-api-access-np54r") pod "0595547b-e59e-49d0-9f2f-05103890af0b" (UID: "0595547b-e59e-49d0-9f2f-05103890af0b"). InnerVolumeSpecName "kube-api-access-np54r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.331415 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0595547b-e59e-49d0-9f2f-05103890af0b" path="/var/lib/kubelet/pods/0595547b-e59e-49d0-9f2f-05103890af0b/volumes" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.332229 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" path="/var/lib/kubelet/pods/68262d79-8bd7-4bb7-b46c-7395a5f63f6f/volumes" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.408829 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np54r\" (UniqueName: \"kubernetes.io/projected/0595547b-e59e-49d0-9f2f-05103890af0b-kube-api-access-np54r\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.655740 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-48jk6/crc-debug-7jrr4"] Dec 05 12:00:45 crc kubenswrapper[5014]: E1205 12:00:45.657491 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerName="registry-server" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.657613 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerName="registry-server" Dec 05 12:00:45 crc kubenswrapper[5014]: E1205 12:00:45.657686 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0595547b-e59e-49d0-9f2f-05103890af0b" containerName="container-00" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.657736 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="0595547b-e59e-49d0-9f2f-05103890af0b" containerName="container-00" Dec 05 12:00:45 crc kubenswrapper[5014]: E1205 12:00:45.657806 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerName="extract-utilities" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.657865 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerName="extract-utilities" Dec 05 12:00:45 crc kubenswrapper[5014]: E1205 12:00:45.657937 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerName="extract-content" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.657990 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerName="extract-content" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.658233 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="0595547b-e59e-49d0-9f2f-05103890af0b" containerName="container-00" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.658339 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="68262d79-8bd7-4bb7-b46c-7395a5f63f6f" containerName="registry-server" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.659854 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.826516 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql4z2\" (UniqueName: \"kubernetes.io/projected/51545182-fbb2-4d91-a110-2acfed5ef15a-kube-api-access-ql4z2\") pod \"crc-debug-7jrr4\" (UID: \"51545182-fbb2-4d91-a110-2acfed5ef15a\") " pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.826790 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51545182-fbb2-4d91-a110-2acfed5ef15a-host\") pod \"crc-debug-7jrr4\" (UID: \"51545182-fbb2-4d91-a110-2acfed5ef15a\") " pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.929408 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql4z2\" (UniqueName: \"kubernetes.io/projected/51545182-fbb2-4d91-a110-2acfed5ef15a-kube-api-access-ql4z2\") pod \"crc-debug-7jrr4\" (UID: \"51545182-fbb2-4d91-a110-2acfed5ef15a\") " pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.929480 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51545182-fbb2-4d91-a110-2acfed5ef15a-host\") pod \"crc-debug-7jrr4\" (UID: \"51545182-fbb2-4d91-a110-2acfed5ef15a\") " pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.929604 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51545182-fbb2-4d91-a110-2acfed5ef15a-host\") pod \"crc-debug-7jrr4\" (UID: \"51545182-fbb2-4d91-a110-2acfed5ef15a\") " pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.949716 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql4z2\" (UniqueName: \"kubernetes.io/projected/51545182-fbb2-4d91-a110-2acfed5ef15a-kube-api-access-ql4z2\") pod \"crc-debug-7jrr4\" (UID: \"51545182-fbb2-4d91-a110-2acfed5ef15a\") " pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:45 crc kubenswrapper[5014]: I1205 12:00:45.988045 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:46 crc kubenswrapper[5014]: I1205 12:00:46.092559 5014 scope.go:117] "RemoveContainer" containerID="c87be00b60cd7c9645c55dcb6540eb21209a6398a5efb45b2a9c7c910b47d861" Dec 05 12:00:46 crc kubenswrapper[5014]: I1205 12:00:46.092704 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-2sqb9" Dec 05 12:00:47 crc kubenswrapper[5014]: I1205 12:00:47.102720 5014 generic.go:334] "Generic (PLEG): container finished" podID="51545182-fbb2-4d91-a110-2acfed5ef15a" containerID="7e2eb7973a8395339d2a444c8f1d27b5eed480c7a686537b514178c3d1472a19" exitCode=0 Dec 05 12:00:47 crc kubenswrapper[5014]: I1205 12:00:47.102816 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/crc-debug-7jrr4" event={"ID":"51545182-fbb2-4d91-a110-2acfed5ef15a","Type":"ContainerDied","Data":"7e2eb7973a8395339d2a444c8f1d27b5eed480c7a686537b514178c3d1472a19"} Dec 05 12:00:47 crc kubenswrapper[5014]: I1205 12:00:47.103327 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/crc-debug-7jrr4" event={"ID":"51545182-fbb2-4d91-a110-2acfed5ef15a","Type":"ContainerStarted","Data":"7e949cd66b235619c99d6cd64fd47ad700ef7435e1cc631db1fbe18324a586bd"} Dec 05 12:00:47 crc kubenswrapper[5014]: I1205 12:00:47.149302 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-48jk6/crc-debug-7jrr4"] Dec 05 12:00:47 crc kubenswrapper[5014]: I1205 12:00:47.159532 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-48jk6/crc-debug-7jrr4"] Dec 05 12:00:48 crc kubenswrapper[5014]: I1205 12:00:48.975021 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:49 crc kubenswrapper[5014]: I1205 12:00:49.105709 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51545182-fbb2-4d91-a110-2acfed5ef15a-host\") pod \"51545182-fbb2-4d91-a110-2acfed5ef15a\" (UID: \"51545182-fbb2-4d91-a110-2acfed5ef15a\") " Dec 05 12:00:49 crc kubenswrapper[5014]: I1205 12:00:49.105844 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/51545182-fbb2-4d91-a110-2acfed5ef15a-host" (OuterVolumeSpecName: "host") pod "51545182-fbb2-4d91-a110-2acfed5ef15a" (UID: "51545182-fbb2-4d91-a110-2acfed5ef15a"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:00:49 crc kubenswrapper[5014]: I1205 12:00:49.105935 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ql4z2\" (UniqueName: \"kubernetes.io/projected/51545182-fbb2-4d91-a110-2acfed5ef15a-kube-api-access-ql4z2\") pod \"51545182-fbb2-4d91-a110-2acfed5ef15a\" (UID: \"51545182-fbb2-4d91-a110-2acfed5ef15a\") " Dec 05 12:00:49 crc kubenswrapper[5014]: I1205 12:00:49.106329 5014 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51545182-fbb2-4d91-a110-2acfed5ef15a-host\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:49 crc kubenswrapper[5014]: I1205 12:00:49.115645 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51545182-fbb2-4d91-a110-2acfed5ef15a-kube-api-access-ql4z2" (OuterVolumeSpecName: "kube-api-access-ql4z2") pod "51545182-fbb2-4d91-a110-2acfed5ef15a" (UID: "51545182-fbb2-4d91-a110-2acfed5ef15a"). InnerVolumeSpecName "kube-api-access-ql4z2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:00:49 crc kubenswrapper[5014]: I1205 12:00:49.148892 5014 scope.go:117] "RemoveContainer" containerID="7e2eb7973a8395339d2a444c8f1d27b5eed480c7a686537b514178c3d1472a19" Dec 05 12:00:49 crc kubenswrapper[5014]: I1205 12:00:49.149071 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-48jk6/crc-debug-7jrr4" Dec 05 12:00:49 crc kubenswrapper[5014]: I1205 12:00:49.207874 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ql4z2\" (UniqueName: \"kubernetes.io/projected/51545182-fbb2-4d91-a110-2acfed5ef15a-kube-api-access-ql4z2\") on node \"crc\" DevicePath \"\"" Dec 05 12:00:49 crc kubenswrapper[5014]: I1205 12:00:49.341147 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51545182-fbb2-4d91-a110-2acfed5ef15a" path="/var/lib/kubelet/pods/51545182-fbb2-4d91-a110-2acfed5ef15a/volumes" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.142768 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415601-h8z2m"] Dec 05 12:01:00 crc kubenswrapper[5014]: E1205 12:01:00.143627 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51545182-fbb2-4d91-a110-2acfed5ef15a" containerName="container-00" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.143644 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="51545182-fbb2-4d91-a110-2acfed5ef15a" containerName="container-00" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.143842 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="51545182-fbb2-4d91-a110-2acfed5ef15a" containerName="container-00" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.144542 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.154549 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415601-h8z2m"] Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.244632 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hkwx\" (UniqueName: \"kubernetes.io/projected/db459c84-599e-471c-a380-179c3e588272-kube-api-access-9hkwx\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.244695 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-combined-ca-bundle\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.244778 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-fernet-keys\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.244901 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-config-data\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.346873 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-fernet-keys\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.346967 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-config-data\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.347100 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hkwx\" (UniqueName: \"kubernetes.io/projected/db459c84-599e-471c-a380-179c3e588272-kube-api-access-9hkwx\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.347136 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-combined-ca-bundle\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.353536 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-combined-ca-bundle\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.354854 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-config-data\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.359138 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-fernet-keys\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.369615 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hkwx\" (UniqueName: \"kubernetes.io/projected/db459c84-599e-471c-a380-179c3e588272-kube-api-access-9hkwx\") pod \"keystone-cron-29415601-h8z2m\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:00 crc kubenswrapper[5014]: I1205 12:01:00.476222 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:02 crc kubenswrapper[5014]: I1205 12:01:02.474827 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415601-h8z2m"] Dec 05 12:01:03 crc kubenswrapper[5014]: I1205 12:01:03.108307 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415601-h8z2m" event={"ID":"db459c84-599e-471c-a380-179c3e588272","Type":"ContainerStarted","Data":"4e12985fd560770ca57ece83108d0891fb0c4cdc325da0b0ca064b37f7b8f338"} Dec 05 12:01:03 crc kubenswrapper[5014]: I1205 12:01:03.108739 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415601-h8z2m" event={"ID":"db459c84-599e-471c-a380-179c3e588272","Type":"ContainerStarted","Data":"9ec9ba33792045b61e710070b3a43db51628e0513c48ef4202cb5642ab658711"} Dec 05 12:01:03 crc kubenswrapper[5014]: I1205 12:01:03.129104 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415601-h8z2m" podStartSLOduration=3.129076412 podStartE2EDuration="3.129076412s" podCreationTimestamp="2025-12-05 12:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:01:03.126861027 +0000 UTC m=+4390.074978741" watchObservedRunningTime="2025-12-05 12:01:03.129076412 +0000 UTC m=+4390.077194116" Dec 05 12:01:05 crc kubenswrapper[5014]: I1205 12:01:05.138251 5014 generic.go:334] "Generic (PLEG): container finished" podID="db459c84-599e-471c-a380-179c3e588272" containerID="4e12985fd560770ca57ece83108d0891fb0c4cdc325da0b0ca064b37f7b8f338" exitCode=0 Dec 05 12:01:05 crc kubenswrapper[5014]: I1205 12:01:05.138324 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415601-h8z2m" event={"ID":"db459c84-599e-471c-a380-179c3e588272","Type":"ContainerDied","Data":"4e12985fd560770ca57ece83108d0891fb0c4cdc325da0b0ca064b37f7b8f338"} Dec 05 12:01:06 crc kubenswrapper[5014]: 
I1205 12:01:06.492510 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.651233 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-combined-ca-bundle\") pod \"db459c84-599e-471c-a380-179c3e588272\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.651310 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hkwx\" (UniqueName: \"kubernetes.io/projected/db459c84-599e-471c-a380-179c3e588272-kube-api-access-9hkwx\") pod \"db459c84-599e-471c-a380-179c3e588272\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.652158 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-config-data\") pod \"db459c84-599e-471c-a380-179c3e588272\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.652229 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-fernet-keys\") pod \"db459c84-599e-471c-a380-179c3e588272\" (UID: \"db459c84-599e-471c-a380-179c3e588272\") " Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.657160 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db459c84-599e-471c-a380-179c3e588272-kube-api-access-9hkwx" (OuterVolumeSpecName: "kube-api-access-9hkwx") pod "db459c84-599e-471c-a380-179c3e588272" (UID: "db459c84-599e-471c-a380-179c3e588272"). InnerVolumeSpecName "kube-api-access-9hkwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.657211 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "db459c84-599e-471c-a380-179c3e588272" (UID: "db459c84-599e-471c-a380-179c3e588272"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.687005 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db459c84-599e-471c-a380-179c3e588272" (UID: "db459c84-599e-471c-a380-179c3e588272"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.718726 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-config-data" (OuterVolumeSpecName: "config-data") pod "db459c84-599e-471c-a380-179c3e588272" (UID: "db459c84-599e-471c-a380-179c3e588272"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.754662 5014 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.754697 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hkwx\" (UniqueName: \"kubernetes.io/projected/db459c84-599e-471c-a380-179c3e588272-kube-api-access-9hkwx\") on node \"crc\" DevicePath \"\"" Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.754710 5014 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:01:06 crc kubenswrapper[5014]: I1205 12:01:06.754718 5014 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db459c84-599e-471c-a380-179c3e588272-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 12:01:07 crc kubenswrapper[5014]: I1205 12:01:07.166371 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415601-h8z2m" event={"ID":"db459c84-599e-471c-a380-179c3e588272","Type":"ContainerDied","Data":"9ec9ba33792045b61e710070b3a43db51628e0513c48ef4202cb5642ab658711"} Dec 05 12:01:07 crc kubenswrapper[5014]: I1205 12:01:07.166886 5014 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ec9ba33792045b61e710070b3a43db51628e0513c48ef4202cb5642ab658711" Dec 05 12:01:07 crc kubenswrapper[5014]: I1205 12:01:07.166466 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415601-h8z2m" Dec 05 12:01:14 crc kubenswrapper[5014]: I1205 12:01:14.250213 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c6dbf5d74-pbtjs_65c45e15-99d6-4c93-ae6e-67bd07e7eba9/barbican-api/0.log" Dec 05 12:01:14 crc kubenswrapper[5014]: I1205 12:01:14.374155 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c6dbf5d74-pbtjs_65c45e15-99d6-4c93-ae6e-67bd07e7eba9/barbican-api-log/0.log" Dec 05 12:01:14 crc kubenswrapper[5014]: I1205 12:01:14.411621 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-c76ffd784-m8mzt_9cfdc764-b85e-48e8-8a0e-0945c00f278f/barbican-keystone-listener/0.log" Dec 05 12:01:14 crc kubenswrapper[5014]: I1205 12:01:14.550604 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-c76ffd784-m8mzt_9cfdc764-b85e-48e8-8a0e-0945c00f278f/barbican-keystone-listener-log/0.log" Dec 05 12:01:14 crc kubenswrapper[5014]: I1205 12:01:14.629369 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-78b5c6757c-hdtxh_7a5e0260-dfe5-4f24-82bc-e172af4db809/barbican-worker/0.log" Dec 05 12:01:14 crc kubenswrapper[5014]: I1205 12:01:14.664772 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-78b5c6757c-hdtxh_7a5e0260-dfe5-4f24-82bc-e172af4db809/barbican-worker-log/0.log" Dec 05 12:01:14 crc kubenswrapper[5014]: I1205 12:01:14.867407 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-22ghr_4fc14e6b-fae1-4d4c-96f8-f5a86422a20a/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 
12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.039508 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4d4ebd00-2f01-406e-9763-e4e58f33f09d/ceilometer-central-agent/0.log" Dec 05 12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.170970 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4d4ebd00-2f01-406e-9763-e4e58f33f09d/ceilometer-notification-agent/0.log" Dec 05 12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.223895 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4d4ebd00-2f01-406e-9763-e4e58f33f09d/proxy-httpd/0.log" Dec 05 12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.277377 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4d4ebd00-2f01-406e-9763-e4e58f33f09d/sg-core/0.log" Dec 05 12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.386665 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_5e322d23-65da-40e8-b814-815c148aa523/cinder-api/0.log" Dec 05 12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.436801 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_5e322d23-65da-40e8-b814-815c148aa523/cinder-api-log/0.log" Dec 05 12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.607645 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_934811df-aabf-44df-8b73-4612a55d73a2/cinder-scheduler/0.log" Dec 05 12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.624563 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_934811df-aabf-44df-8b73-4612a55d73a2/probe/0.log" Dec 05 12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.814576 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-m9t55_209c8894-646c-40b1-a33f-3890d10b3e28/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:15 crc kubenswrapper[5014]: I1205 12:01:15.834748 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-kdpm5_aa2bf2b4-c7fa-40e6-adee-d043c47760bc/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.038609 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-k5tgq_d8a409e0-f594-4164-950f-c1285bf165af/init/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.201586 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-k5tgq_d8a409e0-f594-4164-950f-c1285bf165af/init/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.248486 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-k5tgq_d8a409e0-f594-4164-950f-c1285bf165af/dnsmasq-dns/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.320992 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-q7wv6_82e883e4-b7b9-463c-99e5-ac0a855a22cd/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.479581 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_c59756a0-84f2-4678-9294-aaa2475d08ec/glance-log/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.495602 5014 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_c59756a0-84f2-4678-9294-aaa2475d08ec/glance-httpd/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.676067 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_927d96cb-db91-42ec-8963-4b1259c7b65f/glance-httpd/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.705016 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_927d96cb-db91-42ec-8963-4b1259c7b65f/glance-log/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.936537 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-575d445b9b-l7wlc_b5b07bd8-c674-4647-a09b-eae67ddad491/horizon/0.log" Dec 05 12:01:16 crc kubenswrapper[5014]: I1205 12:01:16.991379 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-rbvxn_f632ba62-c6d0-4229-9d26-cf78c7738723/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:17 crc kubenswrapper[5014]: I1205 12:01:17.204678 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-575d445b9b-l7wlc_b5b07bd8-c674-4647-a09b-eae67ddad491/horizon-log/0.log" Dec 05 12:01:17 crc kubenswrapper[5014]: I1205 12:01:17.237439 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-jns8m_6df89661-8d7a-4ea6-b3ca-4560ecc324f3/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:17 crc kubenswrapper[5014]: I1205 12:01:17.396433 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415601-h8z2m_db459c84-599e-471c-a380-179c3e588272/keystone-cron/0.log" Dec 05 12:01:17 crc kubenswrapper[5014]: I1205 12:01:17.544759 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-c6c5974d5-l72zk_d9c4da24-4b94-4a9f-982f-9114df83cc67/keystone-api/0.log" Dec 05 12:01:17 crc kubenswrapper[5014]: I1205 12:01:17.627892 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_732c79b1-258d-4426-9adf-3019d0935a81/kube-state-metrics/0.log" Dec 05 12:01:17 crc kubenswrapper[5014]: I1205 12:01:17.762939 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-4gw9h_fab44f82-d30a-4bb9-b416-5ff67a5f55b6/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:18 crc kubenswrapper[5014]: I1205 12:01:18.168853 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5d74b89875-gnlqf_91f750dc-c2ab-4b76-b659-4f5e11bf2e85/neutron-httpd/0.log" Dec 05 12:01:18 crc kubenswrapper[5014]: I1205 12:01:18.174462 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5d74b89875-gnlqf_91f750dc-c2ab-4b76-b659-4f5e11bf2e85/neutron-api/0.log" Dec 05 12:01:18 crc kubenswrapper[5014]: I1205 12:01:18.287758 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-v5jzn_d0637356-0bbd-4cbb-a24b-88a27079fb82/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:18 crc kubenswrapper[5014]: I1205 12:01:18.794566 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_512551f9-cbaf-4245-9c35-68a0d6adc709/nova-api-log/0.log" Dec 05 12:01:18 crc kubenswrapper[5014]: I1205 12:01:18.903159 5014 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell0-conductor-0_533479f0-4f9e-46b8-a2b8-b0eea26ae3bc/nova-cell0-conductor-conductor/0.log" Dec 05 12:01:19 crc kubenswrapper[5014]: I1205 12:01:19.169941 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_512551f9-cbaf-4245-9c35-68a0d6adc709/nova-api-api/0.log" Dec 05 12:01:19 crc kubenswrapper[5014]: I1205 12:01:19.488379 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_2d40af86-9a47-4de0-aa6f-a0ec696d2c23/nova-cell1-conductor-conductor/0.log" Dec 05 12:01:19 crc kubenswrapper[5014]: I1205 12:01:19.530495 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_0d4f49a8-b03d-40a0-b688-1e47556fe7b0/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 12:01:19 crc kubenswrapper[5014]: I1205 12:01:19.643334 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-hw7cx_1ef403f3-902c-41ac-874b-25627e6b5637/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:19 crc kubenswrapper[5014]: I1205 12:01:19.869373 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c0e65cd2-d320-4d94-8ea2-034e56ba5880/nova-metadata-log/0.log" Dec 05 12:01:20 crc kubenswrapper[5014]: I1205 12:01:20.215393 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_021926d7-f03a-4b1b-bcf3-bdd000b17a1e/mysql-bootstrap/0.log" Dec 05 12:01:20 crc kubenswrapper[5014]: I1205 12:01:20.264483 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_4b1e7313-1a79-42e6-b286-0046ddd16e69/nova-scheduler-scheduler/0.log" Dec 05 12:01:20 crc kubenswrapper[5014]: I1205 12:01:20.353516 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_021926d7-f03a-4b1b-bcf3-bdd000b17a1e/mysql-bootstrap/0.log" Dec 05 12:01:20 crc kubenswrapper[5014]: I1205 12:01:20.455535 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_021926d7-f03a-4b1b-bcf3-bdd000b17a1e/galera/0.log" Dec 05 12:01:20 crc kubenswrapper[5014]: I1205 12:01:20.567799 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa/mysql-bootstrap/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.074726 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa/mysql-bootstrap/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.081013 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8dfd2d83-1267-4a23-a21a-e7f0d41ec0fa/galera/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.240185 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_f36f2e2a-f41d-4ff2-a989-cf1fdd74cf56/openstackclient/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.263115 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-6wk9t_2b673e96-d37f-49d8-b3f2-c72cd66ab6db/ovn-controller/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.430262 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ff9qh_4b0182b9-be0f-4c4c-8d36-2d2d1e8f70ad/openstack-network-exporter/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.492244 5014 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c0e65cd2-d320-4d94-8ea2-034e56ba5880/nova-metadata-metadata/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.660768 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mfsjr_abc53eaa-a216-4ea8-a223-4e2c79562edb/ovsdb-server-init/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.853016 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mfsjr_abc53eaa-a216-4ea8-a223-4e2c79562edb/ovsdb-server-init/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.888221 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mfsjr_abc53eaa-a216-4ea8-a223-4e2c79562edb/ovs-vswitchd/0.log" Dec 05 12:01:21 crc kubenswrapper[5014]: I1205 12:01:21.938419 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-mfsjr_abc53eaa-a216-4ea8-a223-4e2c79562edb/ovsdb-server/0.log" Dec 05 12:01:22 crc kubenswrapper[5014]: I1205 12:01:22.101655 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-dsjzk_1d432303-8ec7-44e2-8a87-d5e5c8c59979/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:22 crc kubenswrapper[5014]: I1205 12:01:22.211011 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_55bc4dc6-b48b-4963-9004-7614f65bac44/openstack-network-exporter/0.log" Dec 05 12:01:22 crc kubenswrapper[5014]: I1205 12:01:22.280540 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_55bc4dc6-b48b-4963-9004-7614f65bac44/ovn-northd/0.log" Dec 05 12:01:22 crc kubenswrapper[5014]: I1205 12:01:22.406078 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_af56d79f-8f8a-4710-96a9-7995c0a30467/openstack-network-exporter/0.log" Dec 05 12:01:22 crc kubenswrapper[5014]: I1205 12:01:22.431055 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_af56d79f-8f8a-4710-96a9-7995c0a30467/ovsdbserver-nb/0.log" Dec 05 12:01:22 crc kubenswrapper[5014]: I1205 12:01:22.609654 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b28650ad-9ebf-471c-91c9-3adef7f85d9f/openstack-network-exporter/0.log" Dec 05 12:01:22 crc kubenswrapper[5014]: I1205 12:01:22.628464 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b28650ad-9ebf-471c-91c9-3adef7f85d9f/ovsdbserver-sb/0.log" Dec 05 12:01:22 crc kubenswrapper[5014]: I1205 12:01:22.853654 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-75c6d4746d-f9vpc_81877d92-8552-4149-a92a-9a9bdfc431b4/placement-api/0.log" Dec 05 12:01:22 crc kubenswrapper[5014]: I1205 12:01:22.989339 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b16a0ee0-c10b-41b2-a636-4b066b470df6/setup-container/0.log" Dec 05 12:01:23 crc kubenswrapper[5014]: I1205 12:01:23.000315 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-75c6d4746d-f9vpc_81877d92-8552-4149-a92a-9a9bdfc431b4/placement-log/0.log" Dec 05 12:01:23 crc kubenswrapper[5014]: I1205 12:01:23.438086 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b16a0ee0-c10b-41b2-a636-4b066b470df6/setup-container/0.log" Dec 05 12:01:23 crc kubenswrapper[5014]: I1205 12:01:23.474659 5014 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b16a0ee0-c10b-41b2-a636-4b066b470df6/rabbitmq/0.log" Dec 05 12:01:23 crc kubenswrapper[5014]: I1205 12:01:23.479939 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_46830cc1-2cdb-48ad-86a0-159b73d805c3/setup-container/0.log" Dec 05 12:01:23 crc kubenswrapper[5014]: I1205 12:01:23.765849 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_46830cc1-2cdb-48ad-86a0-159b73d805c3/setup-container/0.log" Dec 05 12:01:23 crc kubenswrapper[5014]: I1205 12:01:23.799682 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-tq2ns_9b908cdd-21cf-4f71-8bc7-83db13979563/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:23 crc kubenswrapper[5014]: I1205 12:01:23.881806 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_46830cc1-2cdb-48ad-86a0-159b73d805c3/rabbitmq/0.log" Dec 05 12:01:24 crc kubenswrapper[5014]: I1205 12:01:24.018355 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-zm7rv_6eac20dd-3e47-46e2-91fd-c684094b8d74/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:24 crc kubenswrapper[5014]: I1205 12:01:24.142363 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-xzdj9_6a880be3-7a1f-4e62-9603-9469947923ce/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:24 crc kubenswrapper[5014]: I1205 12:01:24.234167 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-bfwkr_560529fa-7baf-4bce-b55b-3816b5c7928c/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:24 crc kubenswrapper[5014]: I1205 12:01:24.374571 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-p24n8_11921594-1098-41c9-8744-7801330f646c/ssh-known-hosts-edpm-deployment/0.log" Dec 05 12:01:24 crc kubenswrapper[5014]: I1205 12:01:24.611094 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-848c5c7c55-ctrjl_36878e89-1c1c-4054-b9a5-159e056f95f4/proxy-server/0.log" Dec 05 12:01:24 crc kubenswrapper[5014]: I1205 12:01:24.733203 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-nxnqm_3d39e279-9315-4b5e-af14-ea88aef45b00/swift-ring-rebalance/0.log" Dec 05 12:01:24 crc kubenswrapper[5014]: I1205 12:01:24.745886 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-848c5c7c55-ctrjl_36878e89-1c1c-4054-b9a5-159e056f95f4/proxy-httpd/0.log" Dec 05 12:01:24 crc kubenswrapper[5014]: I1205 12:01:24.911449 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/account-auditor/0.log" Dec 05 12:01:24 crc kubenswrapper[5014]: I1205 12:01:24.963209 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/account-reaper/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.035695 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/account-replicator/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.163672 5014 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/account-server/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.248121 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/container-replicator/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.293522 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/container-auditor/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.315612 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/container-server/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.401992 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/container-updater/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.529041 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-auditor/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.552860 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-expirer/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.559113 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-replicator/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.623870 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-server/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.942244 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/swift-recon-cron/0.log" Dec 05 12:01:25 crc kubenswrapper[5014]: I1205 12:01:25.969956 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/object-updater/0.log" Dec 05 12:01:26 crc kubenswrapper[5014]: I1205 12:01:26.022177 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_944ccaf8-60a1-4574-8dec-60c5c7ea3dcf/rsync/0.log" Dec 05 12:01:26 crc kubenswrapper[5014]: I1205 12:01:26.205002 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-q5xq6_68801bb8-5aae-4367-9c85-a1c139ab1844/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:26 crc kubenswrapper[5014]: I1205 12:01:26.270211 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_3f886993-57e9-4023-8186-8fbdeb4fe04c/tempest-tests-tempest-tests-runner/0.log" Dec 05 12:01:26 crc kubenswrapper[5014]: I1205 12:01:26.429434 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_9593349e-2888-4068-9ec8-9b7c4a154a9e/test-operator-logs-container/0.log" Dec 05 12:01:26 crc kubenswrapper[5014]: I1205 12:01:26.579111 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-jv96g_69a4f49e-1b6f-4085-81da-69b0e099b769/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 12:01:34 crc 
kubenswrapper[5014]: I1205 12:01:34.382476 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_ec058eab-d721-4033-b346-bddf43d1de29/memcached/0.log" Dec 05 12:01:52 crc kubenswrapper[5014]: I1205 12:01:52.926692 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/util/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.091708 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/pull/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.114100 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/util/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.147844 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/pull/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.381724 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/util/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.383002 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/extract/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.391683 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b975d6399606046f1762845d19d1c0dc235fd1f75e224180ebc3c0ac688f7j_70125d36-6327-44d7-b08e-485e002d024e/pull/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.564369 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-dtmzt_26989151-2ab4-4ae1-9d53-f9c038fba7e1/kube-rbac-proxy/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.575841 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-txc7h_fce514d3-328b-4d3f-b863-8fbb70bac467/kube-rbac-proxy/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.696218 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-txc7h_fce514d3-328b-4d3f-b863-8fbb70bac467/manager/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.780636 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-pdzgg_90daaa58-8638-46b7-9492-27f70cc124a8/kube-rbac-proxy/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.833853 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-dtmzt_26989151-2ab4-4ae1-9d53-f9c038fba7e1/manager/0.log" Dec 05 12:01:53 crc kubenswrapper[5014]: I1205 12:01:53.919762 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-pdzgg_90daaa58-8638-46b7-9492-27f70cc124a8/manager/0.log" Dec 05 
12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.018620 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-xvrmp_4e5afc7a-459a-4a76-bf92-fd47a823833e/kube-rbac-proxy/0.log" Dec 05 12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.131619 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-xvrmp_4e5afc7a-459a-4a76-bf92-fd47a823833e/manager/0.log" Dec 05 12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.259590 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-422qp_2720df25-8eec-42e6-8c03-8b9d18314712/manager/0.log" Dec 05 12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.279532 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-422qp_2720df25-8eec-42e6-8c03-8b9d18314712/kube-rbac-proxy/0.log" Dec 05 12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.359924 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-pdg4w_aaf5b26b-5eaf-4143-b78f-69f8c976c10a/kube-rbac-proxy/0.log" Dec 05 12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.490604 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-pdg4w_aaf5b26b-5eaf-4143-b78f-69f8c976c10a/manager/0.log" Dec 05 12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.556336 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-lsnnj_344c7e6d-3b0d-4874-b9f1-40b7ae307199/kube-rbac-proxy/0.log" Dec 05 12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.800034 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-lsnnj_344c7e6d-3b0d-4874-b9f1-40b7ae307199/manager/0.log" Dec 05 12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.824248 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-n7nfr_17fd6d59-b4b9-4dea-b697-3998c5d10976/manager/0.log" Dec 05 12:01:54 crc kubenswrapper[5014]: I1205 12:01:54.833698 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-n7nfr_17fd6d59-b4b9-4dea-b697-3998c5d10976/kube-rbac-proxy/0.log" Dec 05 12:01:55 crc kubenswrapper[5014]: I1205 12:01:55.281302 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b9d89_02a9a463-6c8c-4771-b583-6ea38f60b446/kube-rbac-proxy/0.log" Dec 05 12:01:55 crc kubenswrapper[5014]: I1205 12:01:55.343528 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b9d89_02a9a463-6c8c-4771-b583-6ea38f60b446/manager/0.log" Dec 05 12:01:55 crc kubenswrapper[5014]: I1205 12:01:55.488775 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-tvfjq_18a639b9-d602-4c6d-8c71-28611cbd65bf/manager/0.log" Dec 05 12:01:55 crc kubenswrapper[5014]: I1205 12:01:55.516119 5014 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-tvfjq_18a639b9-d602-4c6d-8c71-28611cbd65bf/kube-rbac-proxy/0.log" Dec 05 12:01:55 crc kubenswrapper[5014]: I1205 12:01:55.599757 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-h9j4b_2779b764-e7b5-448c-b189-9e450b7123cb/kube-rbac-proxy/0.log" Dec 05 12:01:55 crc kubenswrapper[5014]: I1205 12:01:55.750172 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-kctmp_7a3504d5-c870-42a1-8cb4-cceed657effe/kube-rbac-proxy/0.log" Dec 05 12:01:55 crc kubenswrapper[5014]: I1205 12:01:55.769593 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-h9j4b_2779b764-e7b5-448c-b189-9e450b7123cb/manager/0.log" Dec 05 12:01:55 crc kubenswrapper[5014]: I1205 12:01:55.849945 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-kctmp_7a3504d5-c870-42a1-8cb4-cceed657effe/manager/0.log" Dec 05 12:01:56 crc kubenswrapper[5014]: I1205 12:01:56.048756 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-t8lzs_355e95da-4f3d-4dce-b35e-79162bedce09/kube-rbac-proxy/0.log" Dec 05 12:01:56 crc kubenswrapper[5014]: I1205 12:01:56.097719 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-t8lzs_355e95da-4f3d-4dce-b35e-79162bedce09/manager/0.log" Dec 05 12:01:56 crc kubenswrapper[5014]: I1205 12:01:56.206552 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-v2hdr_5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37/kube-rbac-proxy/0.log" Dec 05 12:01:56 crc kubenswrapper[5014]: I1205 12:01:56.291264 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-v2hdr_5eb6b6bd-ee7f-4171-b8bf-c6fa71c35f37/manager/0.log" Dec 05 12:01:56 crc kubenswrapper[5014]: I1205 12:01:56.366156 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd_2fba4b2b-28c2-41b6-86a8-7bb26b432f71/kube-rbac-proxy/0.log" Dec 05 12:01:56 crc kubenswrapper[5014]: I1205 12:01:56.446805 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4nrpkd_2fba4b2b-28c2-41b6-86a8-7bb26b432f71/manager/0.log" Dec 05 12:01:56 crc kubenswrapper[5014]: I1205 12:01:56.863816 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-58785945fc-gt9n2_bdd8a367-e716-47ca-99d2-4b9fe9af1f6e/operator/0.log" Dec 05 12:01:56 crc kubenswrapper[5014]: I1205 12:01:56.873720 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-snr6z_f1566577-7102-49a6-a5b8-d27f4b03e350/registry-server/0.log" Dec 05 12:01:57 crc kubenswrapper[5014]: I1205 12:01:57.095081 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-svxkx_e3bb4ae5-8495-40c1-9a07-affdc714ebe0/kube-rbac-proxy/0.log" Dec 05 12:01:57 crc kubenswrapper[5014]: I1205 12:01:57.216867 5014 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-svxkx_e3bb4ae5-8495-40c1-9a07-affdc714ebe0/manager/0.log" Dec 05 12:01:57 crc kubenswrapper[5014]: I1205 12:01:57.222292 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-dbxkc_eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47/kube-rbac-proxy/0.log" Dec 05 12:01:57 crc kubenswrapper[5014]: I1205 12:01:57.373810 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-dbxkc_eabeab6b-00e0-4f5c-a2b9-5c6b6e99ab47/manager/0.log" Dec 05 12:01:57 crc kubenswrapper[5014]: I1205 12:01:57.429890 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-hppvs_fce76d80-94e7-4c38-93c0-044691915f03/operator/0.log" Dec 05 12:01:57 crc kubenswrapper[5014]: I1205 12:01:57.614927 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-69b6fcdff-tzs9c_ddc5d07f-9748-41de-82c4-cf52f02063ac/manager/0.log" Dec 05 12:01:57 crc kubenswrapper[5014]: I1205 12:01:57.651813 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-l9gtg_831cc4a4-0997-4669-8c6d-9dbd8eaea14e/kube-rbac-proxy/0.log" Dec 05 12:01:57 crc kubenswrapper[5014]: I1205 12:01:57.717468 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-l9gtg_831cc4a4-0997-4669-8c6d-9dbd8eaea14e/manager/0.log" Dec 05 12:01:57 crc kubenswrapper[5014]: I1205 12:01:57.901735 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-48xdk_444b1e62-4d81-4e12-8110-9b5f680b3336/kube-rbac-proxy/0.log" Dec 05 12:01:58 crc kubenswrapper[5014]: I1205 12:01:58.006204 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-48xdk_444b1e62-4d81-4e12-8110-9b5f680b3336/manager/0.log" Dec 05 12:01:58 crc kubenswrapper[5014]: I1205 12:01:58.018060 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-fn75n_6cb20401-6c79-43c4-a649-c1df07de148a/manager/0.log" Dec 05 12:01:58 crc kubenswrapper[5014]: I1205 12:01:58.067654 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-fn75n_6cb20401-6c79-43c4-a649-c1df07de148a/kube-rbac-proxy/0.log" Dec 05 12:01:58 crc kubenswrapper[5014]: I1205 12:01:58.208879 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-brstb_3948de6a-fa93-4223-bda4-73afc54cc63c/manager/0.log" Dec 05 12:01:58 crc kubenswrapper[5014]: I1205 12:01:58.217809 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-brstb_3948de6a-fa93-4223-bda4-73afc54cc63c/kube-rbac-proxy/0.log" Dec 05 12:02:19 crc kubenswrapper[5014]: I1205 12:02:19.068998 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-tsztt_9357b561-29c1-4fb1-9004-8bf8378aad02/control-plane-machine-set-operator/0.log" Dec 05 12:02:19 crc kubenswrapper[5014]: I1205 
12:02:19.247028 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-b2znf_ad71de77-0b33-48ff-86d1-87235f83b4bf/machine-api-operator/0.log" Dec 05 12:02:19 crc kubenswrapper[5014]: I1205 12:02:19.256586 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-b2znf_ad71de77-0b33-48ff-86d1-87235f83b4bf/kube-rbac-proxy/0.log" Dec 05 12:02:32 crc kubenswrapper[5014]: I1205 12:02:32.237948 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-kk69q_08898b10-ad0c-4b34-bc40-49a86e6da919/cert-manager-controller/0.log" Dec 05 12:02:32 crc kubenswrapper[5014]: I1205 12:02:32.491074 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-vjh5c_ef8d817f-5b79-4efc-aec5-cf9f4133b0e2/cert-manager-cainjector/0.log" Dec 05 12:02:32 crc kubenswrapper[5014]: I1205 12:02:32.522023 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-qhbsj_43ed4c76-e0f2-4016-8e33-ab3498c5268c/cert-manager-webhook/0.log" Dec 05 12:02:46 crc kubenswrapper[5014]: I1205 12:02:46.005022 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-fzxsv_043084bc-abeb-4cb7-bea1-7dae70ac655d/nmstate-console-plugin/0.log" Dec 05 12:02:46 crc kubenswrapper[5014]: I1205 12:02:46.155019 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-44tzw_a9df2f02-fdb6-46dc-bd30-25b7b4a2d357/nmstate-handler/0.log" Dec 05 12:02:46 crc kubenswrapper[5014]: I1205 12:02:46.239080 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qkl7q_47da56e6-6794-48dc-a7e6-99e6b63ecf43/kube-rbac-proxy/0.log" Dec 05 12:02:46 crc kubenswrapper[5014]: I1205 12:02:46.263321 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qkl7q_47da56e6-6794-48dc-a7e6-99e6b63ecf43/nmstate-metrics/0.log" Dec 05 12:02:46 crc kubenswrapper[5014]: I1205 12:02:46.437049 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-vvdv5_8d7613b6-e062-416c-87e5-428a84a9d24f/nmstate-operator/0.log" Dec 05 12:02:46 crc kubenswrapper[5014]: I1205 12:02:46.547216 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-rnnzf_f73b5791-5c0a-4c9f-a78f-9ed2615f4538/nmstate-webhook/0.log" Dec 05 12:03:02 crc kubenswrapper[5014]: I1205 12:03:02.761972 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-26kt6_6fd388fd-a96d-4997-b3b7-9fef3d7130b7/kube-rbac-proxy/0.log" Dec 05 12:03:02 crc kubenswrapper[5014]: I1205 12:03:02.879972 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-26kt6_6fd388fd-a96d-4997-b3b7-9fef3d7130b7/controller/0.log" Dec 05 12:03:02 crc kubenswrapper[5014]: I1205 12:03:02.936676 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:03:02 crc kubenswrapper[5014]: I1205 12:03:02.936730 5014 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:03:02 crc kubenswrapper[5014]: I1205 12:03:02.981976 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-frr-files/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.175825 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-frr-files/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.179803 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-reloader/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.189364 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-reloader/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.192062 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-metrics/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.417577 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-metrics/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.422031 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-reloader/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.433751 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-frr-files/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.478489 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-metrics/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.607366 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-reloader/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.622406 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-frr-files/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.664089 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/controller/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.692372 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/cp-metrics/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.858050 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/frr-metrics/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.868751 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/kube-rbac-proxy/0.log" Dec 05 12:03:03 crc kubenswrapper[5014]: I1205 12:03:03.910166 5014 log.go:25] "Finished parsing 
log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/kube-rbac-proxy-frr/0.log" Dec 05 12:03:04 crc kubenswrapper[5014]: I1205 12:03:04.094477 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/reloader/0.log" Dec 05 12:03:04 crc kubenswrapper[5014]: I1205 12:03:04.106929 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-8xg84_cd82d1e5-3ac0-4669-a192-3b8bbf071ad5/frr-k8s-webhook-server/0.log" Dec 05 12:03:04 crc kubenswrapper[5014]: I1205 12:03:04.338881 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-54df4ff95d-j68c8_2a78d9b2-16fa-4586-86cf-96397edefe00/manager/0.log" Dec 05 12:03:04 crc kubenswrapper[5014]: I1205 12:03:04.561793 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5f44fbc487-54m6r_bb726700-5715-4a97-92c4-f8a50a0922bb/webhook-server/0.log" Dec 05 12:03:04 crc kubenswrapper[5014]: I1205 12:03:04.622191 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-qr2zx_0cc6b871-45be-4887-a73b-a2fe99989d41/kube-rbac-proxy/0.log" Dec 05 12:03:05 crc kubenswrapper[5014]: I1205 12:03:05.050144 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mwbcz_d67c4f1f-c7c8-4c51-ac84-3bf0261e4660/frr/0.log" Dec 05 12:03:05 crc kubenswrapper[5014]: I1205 12:03:05.210609 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-qr2zx_0cc6b871-45be-4887-a73b-a2fe99989d41/speaker/0.log" Dec 05 12:03:19 crc kubenswrapper[5014]: I1205 12:03:19.560401 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/util/0.log" Dec 05 12:03:19 crc kubenswrapper[5014]: I1205 12:03:19.689136 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/pull/0.log" Dec 05 12:03:19 crc kubenswrapper[5014]: I1205 12:03:19.693143 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/util/0.log" Dec 05 12:03:19 crc kubenswrapper[5014]: I1205 12:03:19.721421 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/pull/0.log" Dec 05 12:03:19 crc kubenswrapper[5014]: I1205 12:03:19.922834 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/pull/0.log" Dec 05 12:03:19 crc kubenswrapper[5014]: I1205 12:03:19.932392 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/util/0.log" Dec 05 12:03:19 crc kubenswrapper[5014]: I1205 12:03:19.967454 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fxv4m4_aea811f6-366f-49c8-853b-ae13ea1a6e2d/extract/0.log" Dec 05 
12:03:20 crc kubenswrapper[5014]: I1205 12:03:20.159000 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/util/0.log" Dec 05 12:03:20 crc kubenswrapper[5014]: I1205 12:03:20.353462 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/util/0.log" Dec 05 12:03:20 crc kubenswrapper[5014]: I1205 12:03:20.363125 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/pull/0.log" Dec 05 12:03:20 crc kubenswrapper[5014]: I1205 12:03:20.453847 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/pull/0.log" Dec 05 12:03:20 crc kubenswrapper[5014]: I1205 12:03:20.553416 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/util/0.log" Dec 05 12:03:20 crc kubenswrapper[5014]: I1205 12:03:20.574106 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/pull/0.log" Dec 05 12:03:20 crc kubenswrapper[5014]: I1205 12:03:20.590264 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83kr8nb_9cfd97f5-cbf6-45a9-99c9-4d10cc699bd5/extract/0.log" Dec 05 12:03:20 crc kubenswrapper[5014]: I1205 12:03:20.730247 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-utilities/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.016447 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-utilities/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.038196 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-content/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.058723 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-content/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.221675 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-utilities/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.226984 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/extract-content/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.490204 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-utilities/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.658782 5014 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-content/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.692737 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-utilities/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.755056 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-content/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.860965 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-98n4c_5b896048-86fc-4051-8a7d-5289365af88d/registry-server/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.982431 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-content/0.log" Dec 05 12:03:21 crc kubenswrapper[5014]: I1205 12:03:21.983186 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/extract-utilities/0.log" Dec 05 12:03:22 crc kubenswrapper[5014]: I1205 12:03:22.207642 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-blg9z_6324df91-5676-4d76-969c-ed24a6f6d7bf/marketplace-operator/0.log" Dec 05 12:03:22 crc kubenswrapper[5014]: I1205 12:03:22.359077 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-utilities/0.log" Dec 05 12:03:22 crc kubenswrapper[5014]: I1205 12:03:22.551689 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-content/0.log" Dec 05 12:03:22 crc kubenswrapper[5014]: I1205 12:03:22.597462 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-utilities/0.log" Dec 05 12:03:22 crc kubenswrapper[5014]: I1205 12:03:22.659437 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-content/0.log" Dec 05 12:03:22 crc kubenswrapper[5014]: I1205 12:03:22.781701 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-28kj9_142ed841-421a-47b3-ad45-bc061c5f8e26/registry-server/0.log" Dec 05 12:03:22 crc kubenswrapper[5014]: I1205 12:03:22.885680 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-utilities/0.log" Dec 05 12:03:22 crc kubenswrapper[5014]: I1205 12:03:22.926381 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/registry-server/0.log" Dec 05 12:03:22 crc kubenswrapper[5014]: I1205 12:03:22.927948 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zj7m7_83cbcedb-43f2-4a1f-aaeb-25a276eeb253/extract-content/0.log" Dec 05 12:03:23 crc kubenswrapper[5014]: I1205 12:03:23.000261 5014 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-utilities/0.log" Dec 05 12:03:23 crc kubenswrapper[5014]: I1205 12:03:23.186089 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-content/0.log" Dec 05 12:03:23 crc kubenswrapper[5014]: I1205 12:03:23.194860 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-utilities/0.log" Dec 05 12:03:23 crc kubenswrapper[5014]: I1205 12:03:23.375715 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-content/0.log" Dec 05 12:03:23 crc kubenswrapper[5014]: I1205 12:03:23.494566 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-content/0.log" Dec 05 12:03:23 crc kubenswrapper[5014]: I1205 12:03:23.568445 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/extract-utilities/0.log" Dec 05 12:03:24 crc kubenswrapper[5014]: I1205 12:03:24.074701 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r2n62_39b91742-73f1-4264-842e-3429afa2bbc6/registry-server/0.log" Dec 05 12:03:32 crc kubenswrapper[5014]: I1205 12:03:32.936599 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:03:32 crc kubenswrapper[5014]: I1205 12:03:32.937153 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:04:02 crc kubenswrapper[5014]: I1205 12:04:02.937676 5014 patch_prober.go:28] interesting pod/machine-config-daemon-cvtv5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:04:02 crc kubenswrapper[5014]: I1205 12:04:02.938323 5014 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:04:02 crc kubenswrapper[5014]: I1205 12:04:02.938386 5014 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" Dec 05 12:04:02 crc kubenswrapper[5014]: I1205 12:04:02.939417 5014 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea"} 
pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:04:02 crc kubenswrapper[5014]: I1205 12:04:02.939511 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" containerName="machine-config-daemon" containerID="cri-o://ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" gracePeriod=600 Dec 05 12:04:03 crc kubenswrapper[5014]: E1205 12:04:03.073696 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:04:03 crc kubenswrapper[5014]: I1205 12:04:03.812731 5014 generic.go:334] "Generic (PLEG): container finished" podID="0c07b133-0b3c-4d10-95f9-23167e184681" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" exitCode=0 Dec 05 12:04:03 crc kubenswrapper[5014]: I1205 12:04:03.812823 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerDied","Data":"ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea"} Dec 05 12:04:03 crc kubenswrapper[5014]: I1205 12:04:03.813162 5014 scope.go:117] "RemoveContainer" containerID="1ac388921aa80740568e587b3d1015f94de996983c82d0ab08892e7a174f4b6e" Dec 05 12:04:03 crc kubenswrapper[5014]: I1205 12:04:03.815168 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:04:03 crc kubenswrapper[5014]: E1205 12:04:03.815714 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:04:16 crc kubenswrapper[5014]: I1205 12:04:16.319735 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:04:16 crc kubenswrapper[5014]: E1205 12:04:16.320540 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:04:21 crc kubenswrapper[5014]: I1205 12:04:21.871459 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5qp7f"] Dec 05 12:04:21 crc kubenswrapper[5014]: E1205 12:04:21.872981 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db459c84-599e-471c-a380-179c3e588272" containerName="keystone-cron" Dec 05 12:04:21 crc 
kubenswrapper[5014]: I1205 12:04:21.873014 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="db459c84-599e-471c-a380-179c3e588272" containerName="keystone-cron" Dec 05 12:04:21 crc kubenswrapper[5014]: I1205 12:04:21.873704 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="db459c84-599e-471c-a380-179c3e588272" containerName="keystone-cron" Dec 05 12:04:21 crc kubenswrapper[5014]: I1205 12:04:21.876541 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:21 crc kubenswrapper[5014]: I1205 12:04:21.898449 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5qp7f"] Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.046386 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtdbx\" (UniqueName: \"kubernetes.io/projected/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-kube-api-access-dtdbx\") pod \"redhat-operators-5qp7f\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.046456 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-utilities\") pod \"redhat-operators-5qp7f\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.046779 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-catalog-content\") pod \"redhat-operators-5qp7f\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.148873 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-catalog-content\") pod \"redhat-operators-5qp7f\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.149005 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtdbx\" (UniqueName: \"kubernetes.io/projected/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-kube-api-access-dtdbx\") pod \"redhat-operators-5qp7f\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.149042 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-utilities\") pod \"redhat-operators-5qp7f\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.149595 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-utilities\") pod \"redhat-operators-5qp7f\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 
12:04:22.149604 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-catalog-content\") pod \"redhat-operators-5qp7f\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.169318 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtdbx\" (UniqueName: \"kubernetes.io/projected/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-kube-api-access-dtdbx\") pod \"redhat-operators-5qp7f\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.196883 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:22 crc kubenswrapper[5014]: I1205 12:04:22.730085 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5qp7f"] Dec 05 12:04:23 crc kubenswrapper[5014]: I1205 12:04:23.011347 5014 generic.go:334] "Generic (PLEG): container finished" podID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerID="889a6333526de96565ffe2858cc99d79d2d94296bea5386111eefea3fcb60a3b" exitCode=0 Dec 05 12:04:23 crc kubenswrapper[5014]: I1205 12:04:23.011495 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5qp7f" event={"ID":"4d31da3a-8528-4cf1-8c48-74ddb5af11fc","Type":"ContainerDied","Data":"889a6333526de96565ffe2858cc99d79d2d94296bea5386111eefea3fcb60a3b"} Dec 05 12:04:23 crc kubenswrapper[5014]: I1205 12:04:23.011652 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5qp7f" event={"ID":"4d31da3a-8528-4cf1-8c48-74ddb5af11fc","Type":"ContainerStarted","Data":"cd9dd3a4e32b19f85e8c436fbd9c34835afaebc2cbd4dd3a8a212255e4ce2c25"} Dec 05 12:04:23 crc kubenswrapper[5014]: I1205 12:04:23.013102 5014 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:04:25 crc kubenswrapper[5014]: I1205 12:04:25.037847 5014 generic.go:334] "Generic (PLEG): container finished" podID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerID="ada060e5cac80aa54dfba86b8a76496eca7e4c84789649bfc513fca07a94d00e" exitCode=0 Dec 05 12:04:25 crc kubenswrapper[5014]: I1205 12:04:25.037967 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5qp7f" event={"ID":"4d31da3a-8528-4cf1-8c48-74ddb5af11fc","Type":"ContainerDied","Data":"ada060e5cac80aa54dfba86b8a76496eca7e4c84789649bfc513fca07a94d00e"} Dec 05 12:04:26 crc kubenswrapper[5014]: I1205 12:04:26.053292 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5qp7f" event={"ID":"4d31da3a-8528-4cf1-8c48-74ddb5af11fc","Type":"ContainerStarted","Data":"1bfdcbb789c2b1b229d12a82e1ef976ac0f15419f1236dfd8375533ffbaac1e1"} Dec 05 12:04:26 crc kubenswrapper[5014]: I1205 12:04:26.085655 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5qp7f" podStartSLOduration=2.663817002 podStartE2EDuration="5.085629845s" podCreationTimestamp="2025-12-05 12:04:21 +0000 UTC" firstStartedPulling="2025-12-05 12:04:23.01283114 +0000 UTC m=+4589.960948844" lastFinishedPulling="2025-12-05 12:04:25.434643973 +0000 UTC m=+4592.382761687" observedRunningTime="2025-12-05 
12:04:26.078630034 +0000 UTC m=+4593.026747748" watchObservedRunningTime="2025-12-05 12:04:26.085629845 +0000 UTC m=+4593.033747559" Dec 05 12:04:27 crc kubenswrapper[5014]: I1205 12:04:27.317975 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:04:27 crc kubenswrapper[5014]: E1205 12:04:27.318249 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:04:32 crc kubenswrapper[5014]: I1205 12:04:32.197973 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:32 crc kubenswrapper[5014]: I1205 12:04:32.198602 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:32 crc kubenswrapper[5014]: I1205 12:04:32.279160 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:33 crc kubenswrapper[5014]: I1205 12:04:33.215481 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:33 crc kubenswrapper[5014]: I1205 12:04:33.276748 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5qp7f"] Dec 05 12:04:35 crc kubenswrapper[5014]: I1205 12:04:35.160138 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5qp7f" podUID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerName="registry-server" containerID="cri-o://1bfdcbb789c2b1b229d12a82e1ef976ac0f15419f1236dfd8375533ffbaac1e1" gracePeriod=2 Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.210500 5014 generic.go:334] "Generic (PLEG): container finished" podID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerID="1bfdcbb789c2b1b229d12a82e1ef976ac0f15419f1236dfd8375533ffbaac1e1" exitCode=0 Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.210634 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5qp7f" event={"ID":"4d31da3a-8528-4cf1-8c48-74ddb5af11fc","Type":"ContainerDied","Data":"1bfdcbb789c2b1b229d12a82e1ef976ac0f15419f1236dfd8375533ffbaac1e1"} Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.376960 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.507680 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-catalog-content\") pod \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.507859 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-utilities\") pod \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.507895 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtdbx\" (UniqueName: \"kubernetes.io/projected/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-kube-api-access-dtdbx\") pod \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\" (UID: \"4d31da3a-8528-4cf1-8c48-74ddb5af11fc\") " Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.509749 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-utilities" (OuterVolumeSpecName: "utilities") pod "4d31da3a-8528-4cf1-8c48-74ddb5af11fc" (UID: "4d31da3a-8528-4cf1-8c48-74ddb5af11fc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.610461 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.610494 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d31da3a-8528-4cf1-8c48-74ddb5af11fc" (UID: "4d31da3a-8528-4cf1-8c48-74ddb5af11fc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.713225 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.873785 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-kube-api-access-dtdbx" (OuterVolumeSpecName: "kube-api-access-dtdbx") pod "4d31da3a-8528-4cf1-8c48-74ddb5af11fc" (UID: "4d31da3a-8528-4cf1-8c48-74ddb5af11fc"). InnerVolumeSpecName "kube-api-access-dtdbx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:04:38 crc kubenswrapper[5014]: I1205 12:04:38.916763 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtdbx\" (UniqueName: \"kubernetes.io/projected/4d31da3a-8528-4cf1-8c48-74ddb5af11fc-kube-api-access-dtdbx\") on node \"crc\" DevicePath \"\"" Dec 05 12:04:39 crc kubenswrapper[5014]: I1205 12:04:39.241206 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5qp7f" event={"ID":"4d31da3a-8528-4cf1-8c48-74ddb5af11fc","Type":"ContainerDied","Data":"cd9dd3a4e32b19f85e8c436fbd9c34835afaebc2cbd4dd3a8a212255e4ce2c25"} Dec 05 12:04:39 crc kubenswrapper[5014]: I1205 12:04:39.241261 5014 scope.go:117] "RemoveContainer" containerID="1bfdcbb789c2b1b229d12a82e1ef976ac0f15419f1236dfd8375533ffbaac1e1" Dec 05 12:04:39 crc kubenswrapper[5014]: I1205 12:04:39.241258 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5qp7f" Dec 05 12:04:39 crc kubenswrapper[5014]: I1205 12:04:39.287038 5014 scope.go:117] "RemoveContainer" containerID="ada060e5cac80aa54dfba86b8a76496eca7e4c84789649bfc513fca07a94d00e" Dec 05 12:04:39 crc kubenswrapper[5014]: I1205 12:04:39.291545 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5qp7f"] Dec 05 12:04:39 crc kubenswrapper[5014]: I1205 12:04:39.303620 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5qp7f"] Dec 05 12:04:39 crc kubenswrapper[5014]: I1205 12:04:39.318714 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:04:39 crc kubenswrapper[5014]: E1205 12:04:39.319089 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:04:39 crc kubenswrapper[5014]: I1205 12:04:39.332393 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" path="/var/lib/kubelet/pods/4d31da3a-8528-4cf1-8c48-74ddb5af11fc/volumes" Dec 05 12:04:39 crc kubenswrapper[5014]: I1205 12:04:39.333326 5014 scope.go:117] "RemoveContainer" containerID="889a6333526de96565ffe2858cc99d79d2d94296bea5386111eefea3fcb60a3b" Dec 05 12:04:51 crc kubenswrapper[5014]: I1205 12:04:51.318782 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:04:51 crc kubenswrapper[5014]: E1205 12:04:51.319966 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:05:02 crc kubenswrapper[5014]: I1205 12:05:02.320899 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:05:02 crc kubenswrapper[5014]: E1205 
12:05:02.321830 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:05:03 crc kubenswrapper[5014]: I1205 12:05:03.481653 5014 generic.go:334] "Generic (PLEG): container finished" podID="8d7c7d58-bc06-4985-97da-55244710418f" containerID="4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a" exitCode=0 Dec 05 12:05:03 crc kubenswrapper[5014]: I1205 12:05:03.481705 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-48jk6/must-gather-flvrt" event={"ID":"8d7c7d58-bc06-4985-97da-55244710418f","Type":"ContainerDied","Data":"4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a"} Dec 05 12:05:03 crc kubenswrapper[5014]: I1205 12:05:03.482399 5014 scope.go:117] "RemoveContainer" containerID="4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a" Dec 05 12:05:03 crc kubenswrapper[5014]: I1205 12:05:03.813369 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-48jk6_must-gather-flvrt_8d7c7d58-bc06-4985-97da-55244710418f/gather/0.log" Dec 05 12:05:14 crc kubenswrapper[5014]: I1205 12:05:14.319150 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:05:14 crc kubenswrapper[5014]: E1205 12:05:14.320219 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:05:14 crc kubenswrapper[5014]: I1205 12:05:14.787738 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-48jk6/must-gather-flvrt"] Dec 05 12:05:14 crc kubenswrapper[5014]: I1205 12:05:14.787993 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-48jk6/must-gather-flvrt" podUID="8d7c7d58-bc06-4985-97da-55244710418f" containerName="copy" containerID="cri-o://a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5" gracePeriod=2 Dec 05 12:05:14 crc kubenswrapper[5014]: I1205 12:05:14.803365 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-48jk6/must-gather-flvrt"] Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.317523 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-48jk6_must-gather-flvrt_8d7c7d58-bc06-4985-97da-55244710418f/copy/0.log" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.322047 5014 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.463140 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgk2n\" (UniqueName: \"kubernetes.io/projected/8d7c7d58-bc06-4985-97da-55244710418f-kube-api-access-xgk2n\") pod \"8d7c7d58-bc06-4985-97da-55244710418f\" (UID: \"8d7c7d58-bc06-4985-97da-55244710418f\") " Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.463399 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8d7c7d58-bc06-4985-97da-55244710418f-must-gather-output\") pod \"8d7c7d58-bc06-4985-97da-55244710418f\" (UID: \"8d7c7d58-bc06-4985-97da-55244710418f\") " Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.484574 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d7c7d58-bc06-4985-97da-55244710418f-kube-api-access-xgk2n" (OuterVolumeSpecName: "kube-api-access-xgk2n") pod "8d7c7d58-bc06-4985-97da-55244710418f" (UID: "8d7c7d58-bc06-4985-97da-55244710418f"). InnerVolumeSpecName "kube-api-access-xgk2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.566398 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgk2n\" (UniqueName: \"kubernetes.io/projected/8d7c7d58-bc06-4985-97da-55244710418f-kube-api-access-xgk2n\") on node \"crc\" DevicePath \"\"" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.596479 5014 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-48jk6_must-gather-flvrt_8d7c7d58-bc06-4985-97da-55244710418f/copy/0.log" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.596866 5014 generic.go:334] "Generic (PLEG): container finished" podID="8d7c7d58-bc06-4985-97da-55244710418f" containerID="a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5" exitCode=143 Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.596926 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-48jk6/must-gather-flvrt" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.596930 5014 scope.go:117] "RemoveContainer" containerID="a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.618482 5014 scope.go:117] "RemoveContainer" containerID="4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.638555 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d7c7d58-bc06-4985-97da-55244710418f-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "8d7c7d58-bc06-4985-97da-55244710418f" (UID: "8d7c7d58-bc06-4985-97da-55244710418f"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.667721 5014 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8d7c7d58-bc06-4985-97da-55244710418f-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.696188 5014 scope.go:117] "RemoveContainer" containerID="a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5" Dec 05 12:05:15 crc kubenswrapper[5014]: E1205 12:05:15.696603 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5\": container with ID starting with a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5 not found: ID does not exist" containerID="a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.696648 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5"} err="failed to get container status \"a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5\": rpc error: code = NotFound desc = could not find container \"a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5\": container with ID starting with a19ece9eea28be655667b8659f3451086aa2ad5827d7a21439d189751edc4db5 not found: ID does not exist" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.696671 5014 scope.go:117] "RemoveContainer" containerID="4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a" Dec 05 12:05:15 crc kubenswrapper[5014]: E1205 12:05:15.697056 5014 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a\": container with ID starting with 4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a not found: ID does not exist" containerID="4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a" Dec 05 12:05:15 crc kubenswrapper[5014]: I1205 12:05:15.697080 5014 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a"} err="failed to get container status \"4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a\": rpc error: code = NotFound desc = could not find container \"4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a\": container with ID starting with 4d2c12b270b7722c252d7971d2fd61e0d93cb1e8f85cdb977c0a2fac9adbf85a not found: ID does not exist" Dec 05 12:05:17 crc kubenswrapper[5014]: I1205 12:05:17.328556 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d7c7d58-bc06-4985-97da-55244710418f" path="/var/lib/kubelet/pods/8d7c7d58-bc06-4985-97da-55244710418f/volumes" Dec 05 12:05:27 crc kubenswrapper[5014]: I1205 12:05:27.318446 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:05:27 crc kubenswrapper[5014]: E1205 12:05:27.319490 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:05:42 crc kubenswrapper[5014]: I1205 12:05:42.318459 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:05:42 crc kubenswrapper[5014]: E1205 12:05:42.319642 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:05:53 crc kubenswrapper[5014]: I1205 12:05:53.324498 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:05:53 crc kubenswrapper[5014]: E1205 12:05:53.325513 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:06:07 crc kubenswrapper[5014]: I1205 12:06:07.029642 5014 scope.go:117] "RemoveContainer" containerID="1f520a8510a30793f9d2a83a24e0bb0252dcb43e2e387841e16bab0e3ea54f17" Dec 05 12:06:07 crc kubenswrapper[5014]: I1205 12:06:07.319694 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:06:07 crc kubenswrapper[5014]: E1205 12:06:07.320546 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:06:18 crc kubenswrapper[5014]: I1205 12:06:18.318003 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:06:18 crc kubenswrapper[5014]: E1205 12:06:18.319058 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:06:33 crc kubenswrapper[5014]: I1205 12:06:33.325397 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:06:33 crc kubenswrapper[5014]: E1205 12:06:33.326355 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.734411 5014 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-57st4"] Dec 05 12:06:40 crc kubenswrapper[5014]: E1205 12:06:40.735116 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7c7d58-bc06-4985-97da-55244710418f" containerName="gather" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.735129 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d7c7d58-bc06-4985-97da-55244710418f" containerName="gather" Dec 05 12:06:40 crc kubenswrapper[5014]: E1205 12:06:40.735140 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerName="registry-server" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.735147 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerName="registry-server" Dec 05 12:06:40 crc kubenswrapper[5014]: E1205 12:06:40.735176 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerName="extract-content" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.735184 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerName="extract-content" Dec 05 12:06:40 crc kubenswrapper[5014]: E1205 12:06:40.735199 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7c7d58-bc06-4985-97da-55244710418f" containerName="copy" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.735210 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d7c7d58-bc06-4985-97da-55244710418f" containerName="copy" Dec 05 12:06:40 crc kubenswrapper[5014]: E1205 12:06:40.735225 5014 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerName="extract-utilities" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.735232 5014 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerName="extract-utilities" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.735581 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7c7d58-bc06-4985-97da-55244710418f" containerName="gather" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.735611 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d31da3a-8528-4cf1-8c48-74ddb5af11fc" containerName="registry-server" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.735628 5014 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7c7d58-bc06-4985-97da-55244710418f" containerName="copy" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.737109 5014 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.748219 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-57st4"] Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.873200 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-catalog-content\") pod \"redhat-marketplace-57st4\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.873302 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdxr9\" (UniqueName: \"kubernetes.io/projected/ab0e380d-021e-449b-995d-36648612a6b7-kube-api-access-kdxr9\") pod \"redhat-marketplace-57st4\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.873412 5014 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-utilities\") pod \"redhat-marketplace-57st4\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.975422 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdxr9\" (UniqueName: \"kubernetes.io/projected/ab0e380d-021e-449b-995d-36648612a6b7-kube-api-access-kdxr9\") pod \"redhat-marketplace-57st4\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.975580 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-utilities\") pod \"redhat-marketplace-57st4\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.975634 5014 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-catalog-content\") pod \"redhat-marketplace-57st4\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.976186 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-catalog-content\") pod \"redhat-marketplace-57st4\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:40 crc kubenswrapper[5014]: I1205 12:06:40.976291 5014 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-utilities\") pod \"redhat-marketplace-57st4\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:41 crc kubenswrapper[5014]: I1205 12:06:41.001484 5014 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-kdxr9\" (UniqueName: \"kubernetes.io/projected/ab0e380d-021e-449b-995d-36648612a6b7-kube-api-access-kdxr9\") pod \"redhat-marketplace-57st4\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:41 crc kubenswrapper[5014]: I1205 12:06:41.059462 5014 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:41 crc kubenswrapper[5014]: I1205 12:06:41.637377 5014 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-57st4"] Dec 05 12:06:42 crc kubenswrapper[5014]: I1205 12:06:42.406079 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57st4" event={"ID":"ab0e380d-021e-449b-995d-36648612a6b7","Type":"ContainerStarted","Data":"7481ce399758b246465278248d809f8096dc49d26bb65a28043e743118d1e77d"} Dec 05 12:06:42 crc kubenswrapper[5014]: I1205 12:06:42.406454 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57st4" event={"ID":"ab0e380d-021e-449b-995d-36648612a6b7","Type":"ContainerStarted","Data":"86ff70850135b24fde0d11578498cd83ee4a79930702d3e19f5534c90ba195c2"} Dec 05 12:06:43 crc kubenswrapper[5014]: I1205 12:06:43.415650 5014 generic.go:334] "Generic (PLEG): container finished" podID="ab0e380d-021e-449b-995d-36648612a6b7" containerID="7481ce399758b246465278248d809f8096dc49d26bb65a28043e743118d1e77d" exitCode=0 Dec 05 12:06:43 crc kubenswrapper[5014]: I1205 12:06:43.415713 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57st4" event={"ID":"ab0e380d-021e-449b-995d-36648612a6b7","Type":"ContainerDied","Data":"7481ce399758b246465278248d809f8096dc49d26bb65a28043e743118d1e77d"} Dec 05 12:06:45 crc kubenswrapper[5014]: I1205 12:06:45.318549 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:06:45 crc kubenswrapper[5014]: E1205 12:06:45.319220 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:06:45 crc kubenswrapper[5014]: I1205 12:06:45.442184 5014 generic.go:334] "Generic (PLEG): container finished" podID="ab0e380d-021e-449b-995d-36648612a6b7" containerID="2a8f2367dbb7b3ee9204f27b28212c97cc456741824c967285e1b7049b7b6d4b" exitCode=0 Dec 05 12:06:45 crc kubenswrapper[5014]: I1205 12:06:45.442231 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57st4" event={"ID":"ab0e380d-021e-449b-995d-36648612a6b7","Type":"ContainerDied","Data":"2a8f2367dbb7b3ee9204f27b28212c97cc456741824c967285e1b7049b7b6d4b"} Dec 05 12:06:47 crc kubenswrapper[5014]: I1205 12:06:47.462458 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57st4" event={"ID":"ab0e380d-021e-449b-995d-36648612a6b7","Type":"ContainerStarted","Data":"77d4a3d2e91718e4b53ffa43fc2d7f882da707b289479d196cb2c9c23c477662"} Dec 05 12:06:51 crc kubenswrapper[5014]: I1205 12:06:51.060284 5014 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:51 crc kubenswrapper[5014]: I1205 12:06:51.060647 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:51 crc kubenswrapper[5014]: I1205 12:06:51.114870 5014 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:06:51 crc kubenswrapper[5014]: I1205 12:06:51.141745 5014 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-57st4" podStartSLOduration=7.672145856 podStartE2EDuration="11.14172269s" podCreationTimestamp="2025-12-05 12:06:40 +0000 UTC" firstStartedPulling="2025-12-05 12:06:43.418483053 +0000 UTC m=+4730.366600757" lastFinishedPulling="2025-12-05 12:06:46.888059887 +0000 UTC m=+4733.836177591" observedRunningTime="2025-12-05 12:06:47.492580612 +0000 UTC m=+4734.440698326" watchObservedRunningTime="2025-12-05 12:06:51.14172269 +0000 UTC m=+4738.089840404" Dec 05 12:06:57 crc kubenswrapper[5014]: I1205 12:06:57.318823 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:06:57 crc kubenswrapper[5014]: E1205 12:06:57.319745 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:07:01 crc kubenswrapper[5014]: I1205 12:07:01.112297 5014 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:07:01 crc kubenswrapper[5014]: I1205 12:07:01.166966 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-57st4"] Dec 05 12:07:01 crc kubenswrapper[5014]: I1205 12:07:01.790371 5014 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-57st4" podUID="ab0e380d-021e-449b-995d-36648612a6b7" containerName="registry-server" containerID="cri-o://77d4a3d2e91718e4b53ffa43fc2d7f882da707b289479d196cb2c9c23c477662" gracePeriod=2 Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.800705 5014 generic.go:334] "Generic (PLEG): container finished" podID="ab0e380d-021e-449b-995d-36648612a6b7" containerID="77d4a3d2e91718e4b53ffa43fc2d7f882da707b289479d196cb2c9c23c477662" exitCode=0 Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.800792 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57st4" event={"ID":"ab0e380d-021e-449b-995d-36648612a6b7","Type":"ContainerDied","Data":"77d4a3d2e91718e4b53ffa43fc2d7f882da707b289479d196cb2c9c23c477662"} Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.801154 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57st4" event={"ID":"ab0e380d-021e-449b-995d-36648612a6b7","Type":"ContainerDied","Data":"86ff70850135b24fde0d11578498cd83ee4a79930702d3e19f5534c90ba195c2"} Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.801169 5014 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="86ff70850135b24fde0d11578498cd83ee4a79930702d3e19f5534c90ba195c2" Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.809638 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.895126 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdxr9\" (UniqueName: \"kubernetes.io/projected/ab0e380d-021e-449b-995d-36648612a6b7-kube-api-access-kdxr9\") pod \"ab0e380d-021e-449b-995d-36648612a6b7\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.895197 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-utilities\") pod \"ab0e380d-021e-449b-995d-36648612a6b7\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.895257 5014 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-catalog-content\") pod \"ab0e380d-021e-449b-995d-36648612a6b7\" (UID: \"ab0e380d-021e-449b-995d-36648612a6b7\") " Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.896004 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-utilities" (OuterVolumeSpecName: "utilities") pod "ab0e380d-021e-449b-995d-36648612a6b7" (UID: "ab0e380d-021e-449b-995d-36648612a6b7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.901263 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab0e380d-021e-449b-995d-36648612a6b7-kube-api-access-kdxr9" (OuterVolumeSpecName: "kube-api-access-kdxr9") pod "ab0e380d-021e-449b-995d-36648612a6b7" (UID: "ab0e380d-021e-449b-995d-36648612a6b7"). InnerVolumeSpecName "kube-api-access-kdxr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.917103 5014 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab0e380d-021e-449b-995d-36648612a6b7" (UID: "ab0e380d-021e-449b-995d-36648612a6b7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.997768 5014 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.997805 5014 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0e380d-021e-449b-995d-36648612a6b7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:07:02 crc kubenswrapper[5014]: I1205 12:07:02.997819 5014 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdxr9\" (UniqueName: \"kubernetes.io/projected/ab0e380d-021e-449b-995d-36648612a6b7-kube-api-access-kdxr9\") on node \"crc\" DevicePath \"\"" Dec 05 12:07:03 crc kubenswrapper[5014]: I1205 12:07:03.809823 5014 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57st4" Dec 05 12:07:03 crc kubenswrapper[5014]: I1205 12:07:03.845055 5014 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-57st4"] Dec 05 12:07:03 crc kubenswrapper[5014]: I1205 12:07:03.857470 5014 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-57st4"] Dec 05 12:07:05 crc kubenswrapper[5014]: I1205 12:07:05.327596 5014 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab0e380d-021e-449b-995d-36648612a6b7" path="/var/lib/kubelet/pods/ab0e380d-021e-449b-995d-36648612a6b7/volumes" Dec 05 12:07:09 crc kubenswrapper[5014]: I1205 12:07:09.318813 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:07:09 crc kubenswrapper[5014]: E1205 12:07:09.319624 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:07:20 crc kubenswrapper[5014]: I1205 12:07:20.319085 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:07:20 crc kubenswrapper[5014]: E1205 12:07:20.319902 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:07:31 crc kubenswrapper[5014]: I1205 12:07:31.318287 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:07:31 crc kubenswrapper[5014]: E1205 12:07:31.318955 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:07:46 crc kubenswrapper[5014]: I1205 12:07:46.318425 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:07:46 crc kubenswrapper[5014]: E1205 12:07:46.319474 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:08:00 crc kubenswrapper[5014]: I1205 12:08:00.318173 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:08:00 crc kubenswrapper[5014]: E1205 12:08:00.318935 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:08:13 crc kubenswrapper[5014]: I1205 12:08:13.324749 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:08:13 crc kubenswrapper[5014]: E1205 12:08:13.325697 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:08:28 crc kubenswrapper[5014]: I1205 12:08:28.319414 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:08:28 crc kubenswrapper[5014]: E1205 12:08:28.320146 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:08:40 crc kubenswrapper[5014]: I1205 12:08:40.318172 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:08:40 crc kubenswrapper[5014]: E1205 12:08:40.319037 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" 
podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:08:52 crc kubenswrapper[5014]: I1205 12:08:52.318871 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:08:52 crc kubenswrapper[5014]: E1205 12:08:52.320892 5014 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-cvtv5_openshift-machine-config-operator(0c07b133-0b3c-4d10-95f9-23167e184681)\"" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" podUID="0c07b133-0b3c-4d10-95f9-23167e184681" Dec 05 12:09:07 crc kubenswrapper[5014]: I1205 12:09:07.318931 5014 scope.go:117] "RemoveContainer" containerID="ed2fb150d743041f5de90b702a7ede4c9035e5c413290fc9c23a20219b2910ea" Dec 05 12:09:07 crc kubenswrapper[5014]: I1205 12:09:07.941996 5014 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cvtv5" event={"ID":"0c07b133-0b3c-4d10-95f9-23167e184681","Type":"ContainerStarted","Data":"396a04eead439290ee8b3a7178bdc5652d5886f6dc740287005f5a851342a9eb"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114545646024460 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114545647017376 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114533575016516 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114533575015466 5ustar corecore